[ 452.895936] env[61999]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61999) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.896281] env[61999]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61999) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.896392] env[61999]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61999) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.896683] env[61999]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 452.990127] env[61999]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61999) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 453.000207] env[61999]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61999) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 453.603184] env[61999]: INFO nova.virt.driver [None req-719dc5c9-ec5c-4494-9466-a7f8e83ad560 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 453.674285] env[61999]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 453.674453] env[61999]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 453.674551] env[61999]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61999) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 456.759618] env[61999]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-d4ba36ee-d662-4277-920b-99c63c95a9c6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.775696] env[61999]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61999) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 456.775886] env[61999]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-479b0489-7e7a-408b-9cb9-339f96c86ffa {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.807104] env[61999]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 9ee80.
[ 456.807258] env[61999]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.133s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.807760] env[61999]: INFO nova.virt.vmwareapi.driver [None req-719dc5c9-ec5c-4494-9466-a7f8e83ad560 None None] VMware vCenter version: 7.0.3
[ 456.811153] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe8da56-312c-46bf-a1b9-301d3720679c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.827967] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa9f59e-8bae-4902-93db-6c2f8ebd90cf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.833695] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a762d318-f023-4f50-b884-46414a4c37c1 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.840237] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d784d787-da1a-42c5-bf2a-7d11ad395e42 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.853383] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea6684a-cd6d-4341-b602-95754383be95 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.859058] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6133c49-51e3-46f3-a1f4-9e140d85fe77 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.888538] env[61999]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-151135cb-6b0f-44cd-8941-af382f0aa512 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.893485] env[61999]: DEBUG nova.virt.vmwareapi.driver [None req-719dc5c9-ec5c-4494-9466-a7f8e83ad560 None None] Extension org.openstack.compute already exists. {{(pid=61999) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:227}}
[ 456.896062] env[61999]: INFO nova.compute.provider_config [None req-719dc5c9-ec5c-4494-9466-a7f8e83ad560 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 457.399067] env[61999]: DEBUG nova.context [None req-719dc5c9-ec5c-4494-9466-a7f8e83ad560 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),a7ebc93f-a17b-417a-9366-d1023a01ec9b(cell1) {{(pid=61999) load_cells /opt/stack/nova/nova/context.py:464}} [ 457.401416] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 457.402029] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 457.402400] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 457.403047] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Acquiring lock "a7ebc93f-a17b-417a-9366-d1023a01ec9b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 457.403177] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Lock "a7ebc93f-a17b-417a-9366-d1023a01ec9b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 457.404123] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Lock "a7ebc93f-a17b-417a-9366-d1023a01ec9b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 457.427261] env[61999]: INFO dbcounter [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Registered counter for database nova_cell0 [ 457.435886] env[61999]: INFO dbcounter [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Registered counter for database nova_cell1 [ 457.439567] env[61999]: DEBUG oslo_db.sqlalchemy.engines [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61999) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 457.439940] env[61999]: DEBUG oslo_db.sqlalchemy.engines [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61999) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 457.444926] env[61999]: ERROR nova.db.main.api [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 457.444926] env[61999]: result = function(*args, **kwargs) [ 457.444926] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 457.444926] env[61999]: return func(*args, **kwargs) [ 457.444926] env[61999]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 457.444926] env[61999]: result = fn(*args, **kwargs) [ 457.444926] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 457.444926] env[61999]: return f(*args, **kwargs) [ 457.444926] env[61999]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 457.444926] env[61999]: return db.service_get_minimum_version(context, binaries) [ 457.444926] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 457.444926] env[61999]: _check_db_access() [ 457.444926] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 457.444926] env[61999]: stacktrace = ''.join(traceback.format_stack()) [ 457.444926] env[61999]: [ 457.445749] env[61999]: ERROR nova.db.main.api [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 457.445749] env[61999]: result = function(*args, **kwargs) [ 457.445749] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 457.445749] env[61999]: return func(*args, **kwargs) [ 457.445749] env[61999]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 457.445749] env[61999]: result = fn(*args, **kwargs) [ 457.445749] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 457.445749] env[61999]: return f(*args, **kwargs) [ 457.445749] env[61999]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 457.445749] env[61999]: return db.service_get_minimum_version(context, binaries) [ 457.445749] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 457.445749] env[61999]: _check_db_access() [ 457.445749] env[61999]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 457.445749] env[61999]: stacktrace = ''.join(traceback.format_stack()) [ 457.445749] env[61999]: [ 457.446151] env[61999]: WARNING nova.objects.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Failed to get minimum service version for cell a7ebc93f-a17b-417a-9366-d1023a01ec9b [ 457.446338] env[61999]: WARNING nova.objects.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 457.446787] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Acquiring lock "singleton_lock" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 457.446947] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Acquired lock "singleton_lock" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
457.447214] env[61999]: DEBUG oslo_concurrency.lockutils [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Releasing lock "singleton_lock" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 457.447542] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Full set of CONF: {{(pid=61999) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 457.447686] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ******************************************************************************** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 457.447811] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Configuration options gathered from: {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 457.447973] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 457.448185] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 457.448313] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ================================================================================ {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 457.448521] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] allow_resize_to_same_host = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.448696] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] arq_binding_timeout = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.448819] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] backdoor_port = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.448985] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] backdoor_socket = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.449185] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] block_device_allocate_retries = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.449350] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] block_device_allocate_retries_interval = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.449521] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cert = self.pem {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.449690] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.449860] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute_monitors = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450041] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] config_dir = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450223] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] config_drive_format = iso9660 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450358] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450523] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] config_source = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450691] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] console_host = devstack {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.450856] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] control_exchange = nova {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451025] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cpu_allocation_ratio = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451192] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] daemon = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451358] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] debug = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451516] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_access_ip_network_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451680] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_availability_zone = nova {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451835] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_ephemeral_format = 
None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.451993] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_green_pool_size = 1000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.452250] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.452417] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] default_schedule_zone = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.452576] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] disk_allocation_ratio = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.452734] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] enable_new_services = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453036] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] enabled_apis = ['osapi_compute'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453090] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] enabled_ssl_apis = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453252] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] flat_injected = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453409] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] force_config_drive = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453564] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] force_raw_images = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453732] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] graceful_shutdown_timeout = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.453892] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] heal_instance_info_cache_interval = 60 {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.454130] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] host = cpu-1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.454307] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.454469] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.454630] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.454847] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455031] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_build_timeout = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455194] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_delete_interval = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455361] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_format = [instance: %(uuid)s] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455526] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_name_template = instance-%08x {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455685] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_usage_audit = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.455854] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_usage_audit_period = month {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456031] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456206] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456374] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] internal_service_availability_zone = internal {{(pid=61999) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456533] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] key = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456690] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] live_migration_retry_count = 30 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.456858] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_color = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457034] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_config_append = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457208] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457368] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_dir = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457526] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457651] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_options = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.457809] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_rotate_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458013] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_rotate_interval_type = days {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458202] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] log_rotation_type = none {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458330] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458456] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458624] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458789] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.458930] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.459115] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] long_rpc_timeout = 1800 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.459281] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_concurrent_builds = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.459438] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_concurrent_live_migrations = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.459654] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_concurrent_snapshots = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.459933] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_local_block_devices = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.460233] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_logfile_count = 30 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.460506] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] max_logfile_size_mb = 200 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.460705] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] maximum_instance_delete_attempts = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.460885] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metadata_listen = 0.0.0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461080] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metadata_listen_port = 8775 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461258] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metadata_workers = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461421] env[61999]: DEBUG oslo_service.service 
[None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] migrate_max_retries = -1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461588] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] mkisofs_cmd = genisoimage {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461801] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.461934] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] my_ip = 10.180.1.21 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.462160] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.462328] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] network_allocate_retries = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.462513] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.462682] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.462845] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] osapi_compute_listen_port = 8774 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463023] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] osapi_compute_unique_server_name_scope = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463194] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] osapi_compute_workers = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463358] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] password_length = 12 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463514] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] periodic_enable = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463671] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] periodic_fuzzy_delay = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.463836] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] pointer_model = usbtablet 
{{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464008] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] preallocate_images = none {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464179] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] publish_errors = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464309] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] pybasedir = /opt/stack/nova {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464461] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ram_allocation_ratio = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464619] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rate_limit_burst = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464785] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rate_limit_except_level = CRITICAL {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.464943] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rate_limit_interval = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465115] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reboot_timeout = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465273] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reclaim_instance_interval = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465429] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] record = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465593] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reimage_timeout_per_gb = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465756] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] report_interval = 120 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.465928] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rescue_timeout = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466116] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reserved_host_cpus = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466280] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reserved_host_disk_mb = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466438] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reserved_host_memory_mb = 512 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466597] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] reserved_huge_pages = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466758] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] resize_confirm_window = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.466915] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] resize_fs_using_block_device = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467086] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] resume_guests_state_on_host_boot = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467258] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467420] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] rpc_response_timeout = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467579] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] run_external_periodic_tasks = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467746] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] running_deleted_instance_action = reap {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.467932] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468117] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] running_deleted_instance_timeout = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468282] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler_instance_sync_interval = 120 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468453] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_down_time = 720 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468620] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
servicegroup_driver = db {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468778] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] shell_completion = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.468947] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] shelved_offload_time = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.469113] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] shelved_poll_interval = 3600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.469284] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] shutdown_timeout = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.469445] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] source_is_ipv6 = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.469602] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ssl_only = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.469866] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470106] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] sync_power_state_interval = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470293] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] sync_power_state_pool_size = 1000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470466] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] syslog_log_facility = LOG_USER {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470626] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] tempdir = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470786] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] timeout_nbd = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.470955] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] transport_url = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471131] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] update_resources_interval = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471290] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_cow_images = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471448] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_eventlog = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471604] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_journal = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471759] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_json = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.471914] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_rootwrap_daemon = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472083] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_stderr = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472242] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] use_syslog = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472397] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vcpu_pin_set = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472562] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plugging_is_fatal = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472727] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plugging_timeout = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.472895] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] virt_mkfs = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.473067] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] volume_usage_poll_interval = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.473227] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] watch_log_file = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.473393] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] web = /usr/share/spice-html5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 457.473580] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_concurrency.disable_process_locking = False {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.473897] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474094] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474264] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474436] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474607] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474770] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.474950] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.auth_strategy = keystone {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.475134] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.compute_link_prefix = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.475311] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.475483] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.dhcp_domain = novalocal {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.475649] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.enable_instance_password = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.475829] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.glance_link_prefix = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476032] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476220] env[61999]: 
DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476399] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.instance_list_per_project_cells = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476565] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.list_records_by_skipping_down_cells = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476730] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.local_metadata_per_cell = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.476900] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.max_limit = 1000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477082] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.metadata_cache_expiration = 15 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477262] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.neutron_default_tenant_id = default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477436] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.response_validation = warn {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477603] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.use_neutron_default_nets = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477774] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.477978] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.478164] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.478341] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.478519] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_dynamic_targets = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
457.478683] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_jsonfile_path = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.478875] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479078] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.backend = dogpile.cache.memcached {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479253] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.backend_argument = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479428] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.config_prefix = cache.oslo {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479601] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.dead_timeout = 60.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479766] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.debug_cache_backend = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.479926] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.enable_retry_client = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480105] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.enable_socket_keepalive = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480281] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.enabled = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480444] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.enforce_fips_mode = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480608] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.expiration_time = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480770] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.hashclient_retry_attempts = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.480934] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481111] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_dead_retry = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481272] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_password = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481433] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481593] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481751] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_pool_maxsize = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.481910] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482087] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_sasl_enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482274] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482439] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482598] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.memcache_username = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482763] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.proxies = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.482925] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_db = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483095] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_password = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483266] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483442] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 
None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483608] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_server = localhost:6379 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483771] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_socket_timeout = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.483928] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.redis_username = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484102] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.retry_attempts = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484265] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.retry_delay = 0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484427] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.socket_keepalive_count = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484585] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.socket_keepalive_idle = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484741] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.socket_keepalive_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.484898] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.tls_allowed_ciphers = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485067] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.tls_cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485226] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.tls_certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.tls_enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485538] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cache.tls_keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485707] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.auth_section = None {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.485906] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.auth_type = password {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486090] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486270] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486432] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486593] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486766] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.cross_az_attach = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.486980] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.debug = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.487168] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.endpoint_template = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.487333] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.http_retries = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.487493] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.487650] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.487820] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.os_region_name = RegionOne {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488047] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488216] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cinder.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488395] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488559] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.cpu_dedicated_set = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488719] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.cpu_shared_set = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.488888] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.image_type_exclude_list = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489086] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489268] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489430] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489591] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489759] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.489921] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.resource_provider_association_refresh = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490097] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490262] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.shutdown_retry_interval = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490442] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490620] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] conductor.workers = 2 {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490801] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] console.allowed_origins = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.490962] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] console.ssl_ciphers = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491153] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] console.ssl_minimum_version = default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491324] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] consoleauth.enforce_session_timeout = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491497] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] consoleauth.token_ttl = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491670] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491828] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.491997] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492171] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492330] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492489] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492652] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492811] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.492971] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.493182] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.493386] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.493550] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.493711] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.493884] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.service_type = accelerator {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494060] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494224] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494379] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494540] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494721] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.494885] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] cyborg.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495080] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.backend = sqlalchemy {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495256] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.connection = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495434] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.connection_debug = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495600] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
database.connection_parameters = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495771] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.connection_recycle_time = 3600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.495979] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.connection_trace = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496174] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.db_inc_retry_interval = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496344] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.db_max_retries = 20 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496507] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.db_max_retry_interval = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496669] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.db_retry_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496831] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.max_overflow = 50 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.496991] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.max_pool_size = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.497168] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.max_retries = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.497339] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.497498] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.mysql_wsrep_sync_wait = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.497652] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.pool_timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.497813] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.retry_interval = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498014] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.slave_connection = **** {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498183] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.sqlite_synchronous = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498345] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] database.use_db_reconnect = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498523] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.backend = sqlalchemy {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498691] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.connection = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.498854] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.connection_debug = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499035] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.connection_parameters = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499205] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.connection_recycle_time = 3600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499365] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.connection_trace = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499880] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.db_inc_retry_interval = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499880] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.db_max_retries = 20 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.499880] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.db_max_retry_interval = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500131] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.db_retry_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500246] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.max_overflow = 50 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500368] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.max_pool_size = 5 {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500531] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.max_retries = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500699] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.500858] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501020] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.pool_timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501183] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.retry_interval = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501342] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.slave_connection = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501500] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] api_database.sqlite_synchronous = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501673] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] devices.enabled_mdev_types = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.501851] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502031] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502203] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ephemeral_storage_encryption.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502367] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502537] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.api_servers = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.cafile = None {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.502864] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503033] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503203] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503363] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503525] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.debug = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503688] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.default_trusted_certificate_ids = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.503848] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.enable_certificate_validation = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504026] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.enable_rbd_download = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504181] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504345] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504506] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504661] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504815] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.504978] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.num_retries = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.505165] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.rbd_ceph_conf = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.505329] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.rbd_connect_timeout = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.505497] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.rbd_pool = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.505665] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.rbd_user = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.505824] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506009] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506186] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506356] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.service_type = image {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506518] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506674] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506832] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.506992] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.507188] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.507353] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.verify_glance_signatures = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.507511] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] glance.version = None 
{{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.507727] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] guestfs.debug = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.507841] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508045] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508219] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508379] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508543] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.508856] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509049] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509219] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509377] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509534] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509688] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509843] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.509997] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510171] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510341] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.service_type = shared-file-system {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510503] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.share_apply_policy_timeout = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510661] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510815] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.510969] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.511135] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.511312] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.511473] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] manila.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.511641] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] mks.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512031] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512251] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] image_cache.manager_interval = 2400 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512429] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] image_cache.precache_concurrency = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512600] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
image_cache.remove_unused_base_images = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512775] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.512944] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513139] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] image_cache.subdirectory_name = _base {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513318] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.api_max_retries = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513483] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.api_retry_interval = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513640] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513800] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.513957] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514129] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514290] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514450] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.conductor_group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514604] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514761] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.514917] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.endpoint_override = None {{(pid=61999) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515088] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515248] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515403] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515556] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515719] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.peer_list = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.515911] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516108] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516278] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.serial_console_state_timeout = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516449] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516603] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.service_type = baremetal {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516762] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.shard = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.516924] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.517096] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.517256] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.517413] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.517626] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.517830] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ironic.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518055] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518245] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] key_manager.fixed_key = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518432] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518596] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.barbican_api_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518756] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.barbican_endpoint = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.518928] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.barbican_endpoint_type = public {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519103] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.barbican_region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519265] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519424] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519579] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519739] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.519896] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520071] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.number_of_retries = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520233] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.retry_delay = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520428] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.send_service_user_token = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520589] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520744] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.520901] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.verify_ssl = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521069] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican.verify_ssl_path = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521237] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521396] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521553] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521707] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.521867] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522032] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522196] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
barbican_service_user.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522357] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522523] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] barbican_service_user.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.approle_role_id = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.522835] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.approle_secret_id = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523014] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.kv_mountpoint = secret {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523182] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.kv_path = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523345] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.kv_version = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523504] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.namespace = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523661] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.root_token_id = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523813] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.ssl_ca_crt_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.523977] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.timeout = 60.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524152] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.use_ssl = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524322] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524496] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.auth_section = None {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524658] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524815] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.524973] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525150] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525307] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525462] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525617] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525775] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.525928] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526097] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526254] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526410] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526565] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526718] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.526885] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.service_type = identity {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527056] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527216] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527374] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527533] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527711] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.527875] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] keystone.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.528080] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.ceph_mount_options = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.connection_uri = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_mode = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_models = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_power_governor_high = performance {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530161] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530513] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_power_management = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530513] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530513] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.device_detach_attempts = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530642] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.device_detach_timeout = 20 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530690] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.disk_cachemodes = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.530850] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.disk_prefix = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531025] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.enabled_perf_events = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531203] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.file_backed_memory = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531377] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.gid_maps = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531542] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.hw_disk_discard = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531702] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.hw_machine_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.531872] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_rbd_ceph_conf = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None 
None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_rbd_glance_store_name = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_rbd_pool = rbd {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_type = default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.images_volume_group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535017] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.inject_key = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.inject_partition = -2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.inject_password = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.iscsi_iface = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.iser_use_multipath = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535383] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_downtime = 500 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
libvirt.live_migration_downtime_steps = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_inbound_addr = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_scheme = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535700] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_timeout_action = abort {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535998] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_tunnelled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535998] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_uri = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535998] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.live_migration_with_native_tls = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535998] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.max_queues = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.535998] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.536243] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.536439] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.nfs_mount_options = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.536611] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.536780] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.536950] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537121] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537281] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537439] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_pcie_ports = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537599] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537759] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.pmem_namespaces = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.537931] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.quobyte_client_cfg = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rbd_secret_uuid = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rbd_user = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541022] env[61999]: DEBUG oslo_service.service 
[None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rescue_image_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rescue_kernel_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rescue_ramdisk_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.rx_queue_size = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.smbfs_mount_options = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.snapshot_compression = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.snapshot_image_format = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.sparse_logical_volumes = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.swtpm_enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541676] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.swtpm_group = tss {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541974] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.swtpm_user = tss {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.541974] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.sysinfo_serial = unique {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542252] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.tb_cache_size = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542252] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.tx_queue_size = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542449] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.uid_maps = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542545] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.use_virtio_for_bridges = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542703] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.virt_type = kvm {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.542860] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.volume_clear = zero {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543367] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.volume_clear_size = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543367] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.volume_use_multipath = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543367] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_cache_path = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543626] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543696] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.543856] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.544034] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.544316] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.544496] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.vzstorage_mount_user = stack {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.544664] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.544837] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545019] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.auth_type = password {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545190] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545349] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545509] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545664] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545819] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.545985] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.default_floating_pool = public {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546161] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546325] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None 
None] neutron.extension_sync_interval = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546485] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.http_retries = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546642] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546798] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.546956] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547140] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547299] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547461] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.ovs_bridge = br-int {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547620] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.physnets = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547788] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.region_name = RegionOne {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.547968] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.548219] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.service_metadata_proxy = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.548401] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.548575] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.service_type = network {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.548739] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.split_loggers = False {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.548897] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549097] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549268] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549454] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549614] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] neutron.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549787] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.bdms_in_notifications = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.549963] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.default_level = INFO {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.550149] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.include_share_mapping = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.550326] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.notification_format = unversioned {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.550491] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.notify_on_state_change = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.550667] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.550845] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] pci.alias = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551020] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] pci.device_spec = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551190] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] pci.report_in_placement = False {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551369] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551544] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.auth_type = password {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551711] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.551871] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552037] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552206] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552367] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552524] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552682] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.default_domain_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552838] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.default_domain_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.552995] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.domain_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.553167] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.domain_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.553324] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.553481] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
457.553634] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.553791] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.553944] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554126] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.password = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554289] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.project_domain_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554452] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.project_domain_name = Default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554614] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.project_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554787] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.project_name = service {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.554955] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.region_name = RegionOne {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555136] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555298] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555469] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.service_type = placement {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555627] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555786] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.555946] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556119] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.system_scope = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556279] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556441] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.trust_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556589] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.user_domain_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556753] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.user_domain_name = Default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.556911] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.user_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.557098] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.username = nova {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.557355] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.557535] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] placement.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.557721] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.cores = 20 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.557896] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.count_usage_from_placement = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.558101] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.558301] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.injected_file_content_bytes = 10240 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.558472] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
quota.injected_file_path_length = 255 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.558689] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.injected_files = 5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.558899] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.instances = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559090] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.key_pairs = 100 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559265] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.metadata_items = 128 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559432] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.ram = 51200 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559596] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.recheck_quota = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559761] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.server_group_members = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.559927] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] quota.server_groups = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560118] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560284] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560443] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.image_metadata_prefilter = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560601] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560761] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.max_attempts = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.560920] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.max_placement_results = 1000 {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561094] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561255] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561413] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561587] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] scheduler.workers = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561754] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.561924] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562117] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562289] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562448] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562608] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562766] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.562956] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563138] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.host_subset_size = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563302] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563456] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563616] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563780] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.isolated_hosts = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.563944] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.isolated_images = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.564121] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.564282] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.564477] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.564689] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.pci_in_placement = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.564873] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565046] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565216] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565440] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565543] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565710] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.565874] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.track_instance_changes = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.566065] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.566310] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metrics.required = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.566496] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metrics.weight_multiplier = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.566659] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.566821] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] metrics.weight_setting = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.567168] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.567347] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.567525] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.port_range = 10000:20000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.567697] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.567868] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568080] 
env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] serial_console.serialproxy_port = 6083 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568258] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568438] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.auth_type = password {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568600] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568759] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.568970] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.569249] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.569492] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.569785] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.send_service_user_token = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.570073] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.570351] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] service_user.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.570647] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.agent_enabled = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.570916] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.571416] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.571685] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.571878] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.html5proxy_port = 6082 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.572066] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.image_compression = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.572349] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.jpeg_compression = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.572592] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.playback_compression = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.572779] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.require_secure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.572958] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.server_listen = 127.0.0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573146] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573311] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.streaming_mode = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573481] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] spice.zlib_compression = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573651] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] upgrade_levels.baseapi = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573823] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] upgrade_levels.compute = auto {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.573986] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] upgrade_levels.conductor = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.574203] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] upgrade_levels.scheduler = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.574391] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
vendordata_dynamic_auth.auth_section = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.574554] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.574714] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.574869] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575040] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575206] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575362] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575522] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575679] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vendordata_dynamic_auth.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.575855] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.api_retry_count = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576039] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.ca_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576199] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576407] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.cluster_name = testcl1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576581] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.connection_pool_size = 10 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576740] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
vmware.console_delay_seconds = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.576912] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.datastore_regex = ^datastore.* {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.577153] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.577328] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.host_password = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.577499] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.host_port = 443 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.577664] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.host_username = administrator@vsphere.local {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.577832] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.insecure = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578034] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.integration_bridge = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578218] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.maximum_objects = 100 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578382] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.pbm_default_policy = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578543] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.pbm_enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578702] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.pbm_wsdl_location = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.578871] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579089] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.serial_port_proxy_uri = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579266] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.serial_port_service_uri = None {{(pid=61999) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579439] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.task_poll_interval = 0.5 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579613] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.use_linked_clone = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579779] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.vnc_keymap = en-us {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.579944] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.vnc_port = 5900 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.580124] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vmware.vnc_port_total = 10000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.580317] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.auth_schemes = ['none'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.580491] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.580819] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581042] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581247] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.novncproxy_port = 6080 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581431] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.server_listen = 127.0.0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581606] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581770] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.vencrypt_ca_certs = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.581926] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.vencrypt_client_cert = None {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582101] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vnc.vencrypt_client_key = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582285] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582448] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_deep_image_inspection = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582609] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582767] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.582924] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583098] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.disable_rootwrap = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583263] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.enable_numa_live_migration = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583427] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583590] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583749] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.583909] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.libvirt_disable_apic = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584088] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584258] env[61999]: DEBUG 
oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584421] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584581] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584742] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.584900] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585073] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585239] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585398] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585566] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585755] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.585954] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.client_socket_timeout = 900 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586164] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.default_pool_size = 1000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586339] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.keep_alive = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586504] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.max_header_line = 16384 {{(pid=61999) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586668] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586827] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.ssl_ca_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.586986] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.ssl_cert_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.587165] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.ssl_key_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.587329] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.tcp_keepidle = 600 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.587509] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.587678] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] zvm.ca_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.587840] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] zvm.cloud_connector_url = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.588196] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.588380] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] zvm.reachable_timeout = 300 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.588567] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.enforce_new_defaults = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.588978] env[61999]: WARNING oslo_config.cfg [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
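Every "group.option = value" record above and below is emitted by oslo.config's `ConfigOpts.log_opt_values()`, the call visible in the `cfg.py:2826` frames, which the service invokes once at startup to record its effective configuration. The following is a minimal, self-contained sketch of that mechanism, not Nova's actual startup code: the `[spice]` options registered here are illustrative stand-ins (real services register hundreds of options across many groups), and no config file is passed so the dump shows defaults only.

```python
# Minimal sketch (assumed example, not Nova's startup code): how a service
# built on oslo.config produces DEBUG "group.option = value" lines like the
# ones in this log via ConfigOpts.log_opt_values().
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
CONF = cfg.CONF

# Illustrative options only; a real service registers hundreds of these
# across groups such as [spice], [vnc], [vmware], [filter_scheduler], ...
CONF.register_opts(
    [cfg.BoolOpt('enabled', default=False),
     cfg.PortOpt('html5proxy_port', default=6082)],
    group='spice')

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # No --config-file is given here, so only defaults are loaded; a real
    # deployment would pass e.g. ['--config-file', '/etc/nova/nova.conf'].
    CONF([], project='nova')
    # This is the call that emits the option dump seen throughout this log.
    CONF.log_opt_values(LOG, logging.DEBUG)
```

Run as-is, this prints the registered defaults in the same `spice.enabled = False` / `spice.html5proxy_port = 6082` form seen in the surrounding records, which is why the dump is a reliable way to read back the configuration a service actually started with.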
[ 457.589182] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.enforce_scope = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.589365] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.policy_default_rule = default {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.589550] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.589728] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.policy_file = policy.yaml {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.589903] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590077] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590242] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590400] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590562] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590731] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.590906] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591096] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.connection_string = messaging:// {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591267] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.enabled = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591434] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.es_doc_type = notification 
{{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591596] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.es_scroll_size = 10000 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591764] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.es_scroll_time = 2m {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.591925] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.filter_error_trace = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592111] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.hmac_keys = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592279] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.sentinel_service_name = mymaster {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592446] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.socket_timeout = 0.1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592605] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.trace_requests = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592764] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler.trace_sqlalchemy = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.592949] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler_jaeger.process_tags = {} {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593124] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler_jaeger.service_name_prefix = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593289] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] profiler_otlp.service_name_prefix = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593457] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] remote_debug.host = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593613] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] remote_debug.port = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593794] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.593955] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594133] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594297] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594458] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594616] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594773] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.594934] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595169] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595273] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595428] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595596] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595762] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.595995] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.596257] 
env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.596445] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.596613] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.596791] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.596954] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597137] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597306] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597470] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597631] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597797] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.597982] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.598177] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.598348] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.598522] env[61999]: DEBUG oslo_service.service [None 
req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.598750] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.598930] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599127] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599302] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599467] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599642] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599816] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.599981] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.600198] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.600368] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_notifications.retry = -1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.600558] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.600734] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.600908] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.auth_section = None {{(pid=61999) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601086] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.auth_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601248] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.cafile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601405] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.certfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601568] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.collect_timing = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601723] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.connect_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.601881] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.connect_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602051] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.endpoint_id = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602217] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.endpoint_override = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602377] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.insecure = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602532] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.keyfile = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602687] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.max_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602841] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.min_version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.602997] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.region_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603174] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.retriable_status_codes = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603329] 
env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.service_name = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603480] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.service_type = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603639] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.split_loggers = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603797] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.status_code_retries = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.603947] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.status_code_retry_delay = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604136] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.timeout = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604295] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.valid_interfaces = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604454] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_limit.version = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604619] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_reports.file_event_handler = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604779] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.604933] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] oslo_reports.log_dir = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605121] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605281] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605435] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605600] 
env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605764] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.605920] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606101] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606261] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606419] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606580] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606737] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.606895] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] vif_plug_ovs_privileged.user = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607078] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607261] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607435] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607606] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607777] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.607979] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.608210] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.608390] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.608576] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.608748] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.isolate_vif = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.608918] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609136] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609317] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609492] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609657] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_vif_ovs.per_port_bridge = False {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609826] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_brick.lock_path = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.609989] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610168] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610339] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None 
None] privsep_osbrick.capabilities = [21] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610499] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] privsep_osbrick.group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610653] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] privsep_osbrick.helper_command = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610814] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.610977] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611152] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] privsep_osbrick.user = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611325] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611483] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.group = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611640] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.helper_command = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611803] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.611965] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.612136] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] nova_sys_admin.user = None {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 457.612273] env[61999]: DEBUG oslo_service.service [None req-23f85225-0bf0-4d82-b190-3d587244c818 None None] ******************************************************************************** {{(pid=61999) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 457.612791] env[61999]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 458.115899] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Getting list of instances from cluster (obj){ [ 458.115899] env[61999]: value = "domain-c8" [ 458.115899] env[61999]: 
_type = "ClusterComputeResource" [ 458.115899] env[61999]: } {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 458.117099] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dfa01e-5bf7-4cef-bb08-df1af14aba22 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 458.129245] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Got total of 0 instances {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 458.130080] env[61999]: WARNING nova.virt.vmwareapi.driver [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 458.130760] env[61999]: INFO nova.virt.node [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Generated node identity dee5b0a7-9732-42d5-93c0-6b719a790f37 [ 458.131115] env[61999]: INFO nova.virt.node [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Wrote node identity dee5b0a7-9732-42d5-93c0-6b719a790f37 to /opt/stack/data/n-cpu-1/compute_id [ 458.634632] env[61999]: WARNING nova.compute.manager [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Compute nodes ['dee5b0a7-9732-42d5-93c0-6b719a790f37'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 459.640465] env[61999]: INFO nova.compute.manager [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 460.645935] env[61999]: WARNING nova.compute.manager [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 460.646281] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 460.646434] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 460.646576] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 460.646723] env[61999]: DEBUG nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61999) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 460.647645] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e056ead-77a4-4fbd-883f-4afff8e8f71a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 460.656217] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b551d7-30f4-46c8-a8fa-98dcfb9bf802 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 460.671021] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee2f99b-017a-4eb7-bd61-31d4bc1d6d25 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 460.677139] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3998e27e-f4d0-4243-8508-752954c81f3f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 460.706133] env[61999]: DEBUG nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181608MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61999) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 460.706320] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 460.706466] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 461.208826] env[61999]: WARNING 
nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] No compute node record for cpu-1:dee5b0a7-9732-42d5-93c0-6b719a790f37: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host dee5b0a7-9732-42d5-93c0-6b719a790f37 could not be found. [ 461.712958] env[61999]: INFO nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: dee5b0a7-9732-42d5-93c0-6b719a790f37 [ 463.220932] env[61999]: DEBUG nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61999) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 463.221309] env[61999]: DEBUG nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61999) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 463.377879] env[61999]: INFO nova.scheduler.client.report [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] [req-de3c40ef-5988-405e-a86c-e2c3f4270a1c] Created resource provider record via placement API for resource provider with UUID dee5b0a7-9732-42d5-93c0-6b719a790f37 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 463.397246] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58857b3-f985-48cf-9529-866f0f4f7ffd {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.404746] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdfa45c-cacf-431e-9966-c03eddea09cf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.434099] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcdf697-2754-49b2-b23f-c13af019b8c4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.440587] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabd814e-da89-457a-898c-7b17f70ee9cf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.452982] env[61999]: DEBUG nova.compute.provider_tree [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 463.989889] env[61999]: DEBUG nova.scheduler.client.report [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Updated inventory for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 463.991056] env[61999]: DEBUG nova.compute.provider_tree [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Updating resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37 generation from 0 to 1 during operation: update_inventory {{(pid=61999) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 463.991056] env[61999]: DEBUG nova.compute.provider_tree [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 464.039783] env[61999]: DEBUG nova.compute.provider_tree [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Updating resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37 generation from 1 to 2 during operation: update_traits {{(pid=61999) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 464.544791] env[61999]: DEBUG nova.compute.resource_tracker [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61999) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 464.545409] env[61999]: DEBUG oslo_concurrency.lockutils [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.839s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 464.545409] env[61999]: DEBUG nova.service [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Creating RPC server for service compute {{(pid=61999) start /opt/stack/nova/nova/service.py:186}} [ 464.558757] env[61999]: DEBUG nova.service [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] Join ServiceGroup membership for this service compute {{(pid=61999) start /opt/stack/nova/nova/service.py:203}} [ 464.558992] env[61999]: DEBUG nova.servicegroup.drivers.db [None req-d1ae9b71-e7d1-4dad-8358-94309b39594c None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61999) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 471.561081] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._sync_power_states {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 472.063920] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Getting list of instances from cluster (obj){ [ 472.063920] env[61999]: value = 
"domain-c8" [ 472.063920] env[61999]: _type = "ClusterComputeResource" [ 472.063920] env[61999]: } {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 472.065432] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930574eb-a925-400a-aa8c-a60313f745e0 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.073690] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Got total of 0 instances {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 472.073905] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 472.074200] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Getting list of instances from cluster (obj){ [ 472.074200] env[61999]: value = "domain-c8" [ 472.074200] env[61999]: _type = "ClusterComputeResource" [ 472.074200] env[61999]: } {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 472.075011] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4420e35-1413-440c-b81f-52481e64d252 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.082052] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Got total of 0 instances {{(pid=61999) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 501.596653] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "77195a41-5390-4d50-b9e9-43f4e586fe2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.596653] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "77195a41-5390-4d50-b9e9-43f4e586fe2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.102273] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 502.422893] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.423167] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.641970] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.642278] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.645392] env[61999]: INFO nova.compute.claims [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 502.925568] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 503.457380] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.747074] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbe176a-1ece-4b5c-85ae-5d91f505761e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.763308] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0988f89-0127-483c-83a4-56721f33c4be {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.794195] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d141086a-a411-46e2-be0b-e942d2128e4c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.804408] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aeaa37-66f3-4495-aa3b-76c880e63744 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.822109] env[61999]: DEBUG nova.compute.provider_tree [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 504.252468] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquiring lock "492a50f9-06bf-40b2-8746-1516c045b9b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.252833] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "492a50f9-06bf-40b2-8746-1516c045b9b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.327440] env[61999]: DEBUG nova.scheduler.client.report [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 504.492965] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquiring lock "2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.494675] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.758024] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 504.835592] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 504.836538] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Start building networks asynchronously for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 504.843906] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.387s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.845360] env[61999]: INFO nova.compute.claims [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 505.000386] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 505.115110] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.115395] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.286171] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.351623] env[61999]: DEBUG nova.compute.utils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 505.355399] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 505.355762] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 505.530272] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.620331] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 505.734893] env[61999]: DEBUG nova.policy [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ceaa0ed456d43fe9ff44e8213b46f4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '462ba1ab039a41d584952ff58e871121', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 505.856985] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Start building block device mappings for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 505.997748] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74415467-0f8b-459c-bb23-4fd9ccf3f1a3 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.005655] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71546f4-83fb-4146-ba2e-dc2966a69dcc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.043857] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b76f54-f156-4065-9fca-5ce5885ca4ce {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.052797] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926ce55d-4c53-4f07-a792-41f1ea24be40 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.069834] env[61999]: DEBUG nova.compute.provider_tree [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 506.079326] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquiring lock "91c68a8b-8a6b-4e31-8b76-4da94fa66748" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.079535] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "91c68a8b-8a6b-4e31-8b76-4da94fa66748" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 506.149159] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.456385] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Successfully created port: d8e432a4-6e78-4bea-9ce8-5d3e18da0476 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 506.574156] env[61999]: DEBUG nova.scheduler.client.report [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 506.583249] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 506.874796] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 506.901040] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 506.901431] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 506.901431] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 506.901613] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 506.901950] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 506.901950] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 506.902169] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 506.902361] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
506.903359] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 506.903359] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 506.903359] env[61999]: DEBUG nova.virt.hardware [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 506.903908] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3625eb-9ae7-440a-8237-04d4839f0ea3 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.912747] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c447612-07f4-45d1-bcf1-24260d2cd5dd {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.928810] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef64dae-6e4a-467f-a4a8-67f075dab920 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.083207] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.238s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 507.083207] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 507.086050] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.800s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.088065] env[61999]: INFO nova.compute.claims [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 507.128426] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.593691] env[61999]: DEBUG nova.compute.utils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 507.598068] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Not allocating networking since 'none' was specified. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 507.647211] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquiring lock "bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.647436] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Lock "bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.102019] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 508.155214] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 508.251834] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1ec0f9-41db-4ea4-819b-591ae7498735 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.259628] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de784bb-e754-41e9-9dbb-46cc02c06830 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.290725] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2817fb8a-816f-4b76-bf15-cd20ec6d74c4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.297989] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c64b93-62fe-4e43-b3a9-970979587f02 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.311948] env[61999]: DEBUG nova.compute.provider_tree [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 508.703931] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.815928] env[61999]: DEBUG nova.scheduler.client.report [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 509.118699] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 509.153627] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 509.153999] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 509.154079] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 509.154215] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 509.154355] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 509.154496] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 509.154691] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 509.154841] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 509.155012] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 
tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 509.155746] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 509.155939] env[61999]: DEBUG nova.virt.hardware [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 509.157085] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f5a865-e6ea-498f-9dbe-2d46b4a84a22 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.171794] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9593b9a-6226-44ce-a26a-008d390f6b70 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.191640] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance VIF info [] {{(pid=61999) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 509.201452] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61999) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 509.201838] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75000ae6-dc84-4e56-bbdc-777aafadde0c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.213294] env[61999]: INFO nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created folder: OpenStack in parent group-v4. [ 509.214413] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating folder: Project (1b3c9292585f494a9fa02cd30f8482ea). Parent ref: group-v230859. {{(pid=61999) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 509.214603] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2c81fa7-a2e4-448c-96e7-2b649c82ce78 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.225590] env[61999]: INFO nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created folder: Project (1b3c9292585f494a9fa02cd30f8482ea) in parent group-v230859. 
[ 509.226758] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating folder: Instances. Parent ref: group-v230860. {{(pid=61999) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 509.226758] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab58cad1-caf0-4169-afe9-b265ff9f694d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.235201] env[61999]: INFO nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created folder: Instances in parent group-v230860. [ 509.235458] env[61999]: DEBUG oslo.service.loopingcall [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 509.235646] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Creating VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 509.236264] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e17a161f-1d46-4867-b5b8-154d95ddc956 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.256564] env[61999]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 509.256564] env[61999]: value = "task-1043178" [ 509.256564] env[61999]: _type = "Task" [ 509.256564] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.263656] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043178, 'name': CreateVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.325736] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 509.326411] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 509.329148] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.799s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.333260] env[61999]: INFO nova.compute.claims [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 509.386768] env[61999]: ERROR nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. [ 509.386768] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 509.386768] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 509.386768] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 509.386768] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 509.386768] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 509.386768] env[61999]: ERROR nova.compute.manager raise self.value [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 509.386768] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 509.386768] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 509.386768] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 509.387316] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 509.387316] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 509.387316] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. 
[ 509.387316] env[61999]: ERROR nova.compute.manager [ 509.387316] env[61999]: Traceback (most recent call last): [ 509.387316] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 509.387316] env[61999]: listener.cb(fileno) [ 509.387316] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 509.387316] env[61999]: result = function(*args, **kwargs) [ 509.387316] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 509.387316] env[61999]: return func(*args, **kwargs) [ 509.387316] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 509.387316] env[61999]: raise e [ 509.387316] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 509.387316] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 509.387316] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 509.387316] env[61999]: created_port_ids = self._update_ports_for_instance( [ 509.387316] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 509.387316] env[61999]: with excutils.save_and_reraise_exception(): [ 509.387316] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 509.387316] env[61999]: self.force_reraise() [ 509.387316] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 509.387316] env[61999]: raise self.value [ 509.387316] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 509.387316] env[61999]: updated_port = self._update_port( [ 509.387316] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 509.387316] env[61999]: _ensure_no_port_binding_failure(port) [ 509.387316] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 509.387316] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 509.388073] env[61999]: nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. [ 509.388073] env[61999]: Removing descriptor: 15 [ 509.388926] env[61999]: ERROR nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. 
[ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Traceback (most recent call last): [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] yield resources [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.driver.spawn(context, instance, image_meta, [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] vm_ref = self.build_virtual_machine(instance, [ 509.388926] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] vif_infos = vmwarevif.get_vif_info(self._session, [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] for vif in network_info: [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self._sync_wrapper(fn, *args, **kwargs) [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.wait() [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self[:] = self._gt.wait() [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self._exit_event.wait() [ 509.389289] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 509.389289] env[61999]: ERROR 
nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] result = hub.switch() [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self.greenlet.switch() [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] result = function(*args, **kwargs) [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return func(*args, **kwargs) [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise e [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] nwinfo = self.network_api.allocate_for_instance( [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] created_port_ids = self._update_ports_for_instance( [ 509.389621] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] with excutils.save_and_reraise_exception(): [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.force_reraise() [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise self.value [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] updated_port = self._update_port( [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 509.389978] 
env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] _ensure_no_port_binding_failure(port) [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise exception.PortBindingFailed(port_id=port['id']) [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. [ 509.389978] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] [ 509.390341] env[61999]: INFO nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Terminating instance [ 509.771029] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043178, 'name': CreateVM_Task, 'duration_secs': 0.335651} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 509.771029] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Created VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 509.771029] env[61999]: DEBUG oslo_vmware.service [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f91039-b026-4f27-8e7c-004d17f52b29 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.777123] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 509.777304] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 509.778492] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 509.778857] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19af17fc-55eb-4fec-a16d-6edb6ef7d5df {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.783188] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 
tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 509.783188] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]526e4637-72b0-9dc2-210f-6b5e02e2b9fe" [ 509.783188] env[61999]: _type = "Task" [ 509.783188] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.790890] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]526e4637-72b0-9dc2-210f-6b5e02e2b9fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.835381] env[61999]: DEBUG nova.compute.utils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 509.841395] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 509.841395] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 509.894238] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 509.894468] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquired lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 509.894707] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 509.950781] env[61999]: DEBUG nova.policy [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080702eb4beb4227b3bfd37c2eb51ed5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'599942d196d144c687513803d5b375e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 510.226557] env[61999]: DEBUG nova.compute.manager [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Received event network-changed-d8e432a4-6e78-4bea-9ce8-5d3e18da0476 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 510.226928] env[61999]: DEBUG nova.compute.manager [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Refreshing instance network info cache due to event network-changed-d8e432a4-6e78-4bea-9ce8-5d3e18da0476. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 510.228498] env[61999]: DEBUG oslo_concurrency.lockutils [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] Acquiring lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 510.299028] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 510.299028] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Processing image 9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 510.299028] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 510.299237] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 510.299519] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 510.299785] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7df216f4-c7ad-4f49-a6e3-df03bc221f79 
{{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.319217] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 510.319217] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61999) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 510.319935] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20d36dd-6af0-484e-a235-ee2a7782ad23 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.330102] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe5a0b7d-7115-42f1-8ccd-75ae66f73ac7 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.336755] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 510.336755] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]52e56fc2-ff3e-49e8-f2f9-8b2f82a279f2" [ 510.336755] env[61999]: _type = "Task" [ 510.336755] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 510.343609] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 510.353491] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Preparing fetch location {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 510.353729] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating directory with path [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 510.353959] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f110e741-1a24-4a3e-b74b-6384e1afd8c7 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.382814] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created directory with path [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 510.382814] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Fetch image to [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 510.382814] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloading image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk on the data store datastore2 {{(pid=61999) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 510.389673] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcea2b1-c2ef-4116-9f8d-6fbc81c52cfc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.406300] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31feafa2-bd85-4a13-a5d7-4f6bd5e18dbf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.428941] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb80bb1-a049-4ed8-9fae-8a33e46eb905 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.470098] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc5661a-4c2d-4f32-88c6-fc5474a391c6 {{(pid=61999) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.478806] env[61999]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c080db4c-a0e3-4ee8-89bb-c39cab9d4fb7 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.497839] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 510.503267] env[61999]: DEBUG nova.virt.vmwareapi.images [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloading image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to the data store datastore2 {{(pid=61999) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 510.539519] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50043df-8ce5-4c3b-b66c-ed62a3e61eb1 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.553191] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6dbaf6-bf4f-42e1-86cf-708e4373040f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.601670] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a3db6b-872d-4503-b411-65685265a414 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.615101] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 510.621701] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34211bbe-918c-4aee-ac83-6fb779eb0c8b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.642695] env[61999]: DEBUG nova.compute.provider_tree [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 510.661561] env[61999]: DEBUG oslo_vmware.rw_handles [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61999) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 510.900567] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Successfully created port: 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 511.122211] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Releasing lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 511.122762] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 511.123783] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 511.124162] env[61999]: DEBUG oslo_concurrency.lockutils [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] Acquired lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 511.124380] env[61999]: DEBUG nova.network.neutron [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Refreshing network info cache for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 511.125679] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e8a3005-08b8-48bf-822e-800f53769393 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.137045] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f52656a-4d1f-4e36-a956-fb4df967493d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.157036] env[61999]: DEBUG nova.scheduler.client.report [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 511.175016] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77195a41-5390-4d50-b9e9-43f4e586fe2e could not be found. [ 511.175259] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 511.175753] env[61999]: INFO nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 511.175990] env[61999]: DEBUG oslo.service.loopingcall [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 511.176895] env[61999]: DEBUG nova.compute.manager [-] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 511.177086] env[61999]: DEBUG nova.network.neutron [-] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 511.223308] env[61999]: DEBUG nova.network.neutron [-] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 511.358355] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 511.386535] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 511.386903] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 511.387124] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 511.387359] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 511.387531] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 511.387706] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 511.387954] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 511.388232] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 511.388437] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 511.388690] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 511.388853] env[61999]: DEBUG nova.virt.hardware [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 511.389892] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883cb309-daf9-4e34-9164-7995caca0a13 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.398992] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e410808c-f882-49f1-b15b-4c0479d06d94 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.503185] env[61999]: DEBUG oslo_vmware.rw_handles [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Completed reading data from the image iterator. {{(pid=61999) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 511.503334] env[61999]: DEBUG oslo_vmware.rw_handles [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61999) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 511.572933] env[61999]: DEBUG nova.virt.vmwareapi.images [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloaded image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk on the data store datastore2 {{(pid=61999) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 511.575017] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Caching image {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 511.575017] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copying Virtual Disk [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk to [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 511.575235] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a3bb496-fa8e-4a92-afdf-ef932b9e4220 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.588031] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 511.588031] env[61999]: value = "task-1043179" [ 511.588031] env[61999]: _type = "Task" [ 511.588031] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 511.594433] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043179, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 511.659945] env[61999]: DEBUG nova.network.neutron [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 511.663522] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 511.664165] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Start building networks asynchronously for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 511.667246] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.518s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.668851] env[61999]: INFO nova.compute.claims [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 511.728145] env[61999]: DEBUG nova.network.neutron [-] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 511.820724] env[61999]: DEBUG nova.network.neutron [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 512.097981] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043179, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 512.174674] env[61999]: DEBUG nova.compute.utils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 512.182729] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Allocating IP information in the background. 
{{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 512.183042] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 512.232890] env[61999]: INFO nova.compute.manager [-] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Took 1.06 seconds to deallocate network for instance. [ 512.235740] env[61999]: DEBUG nova.compute.claims [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 512.236025] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.324215] env[61999]: DEBUG oslo_concurrency.lockutils [req-aea439da-4bc0-4ad5-ba85-8f1286563ea2 req-17a1b967-8b68-4d3f-ab17-61bb8c4d774d service nova] Releasing lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 512.458983] env[61999]: DEBUG nova.policy [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbfaeb610944472dae217b2b94508e1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd79f9d0790de4ea2aa940a4806ccca4a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 512.598049] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722041} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 512.598126] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copied Virtual Disk [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk to [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 512.598304] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 512.598572] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1fdda9d-df0d-47e5-8c1d-17bde2e61a15 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.606198] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 512.606198] env[61999]: value = "task-1043180" [ 512.606198] env[61999]: _type = "Task" [ 512.606198] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 512.616980] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 512.684679] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 512.850044] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0f6285-622f-44ba-bee4-f38af669671e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.858473] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40615c7-8bc6-4c0e-a78f-0ebc7b47f8b0 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.897560] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb346f39-7242-49af-8021-f82de6258751 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.905734] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea95658-51d4-4056-ba72-424a0116cf54 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.919301] env[61999]: DEBUG nova.compute.provider_tree [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 513.051848] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.053535] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.053535] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Starting heal instance info cache {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10401}} [ 513.053648] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Rebuilding the list of instances to heal {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10405}} [ 513.123130] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024917} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 513.123466] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 513.123902] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Moving file from [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741/9eed6d08-b529-4317-89cc-ae13b2d60cea to [datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea. {{(pid=61999) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 513.124231] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f1196753-f9e0-47e8-afb7-087abcaa280b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.132088] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 513.132088] env[61999]: value = "task-1043181" [ 513.132088] env[61999]: _type = "Task" [ 513.132088] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 513.140988] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043181, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.423237] env[61999]: DEBUG nova.scheduler.client.report [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 513.560344] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Skipping network cache update for instance because it is Building. {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10414}} [ 513.560344] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Skipping network cache update for instance because it is Building. 
{{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10414}} [ 513.560680] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Skipping network cache update for instance because it is Building. {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10414}} [ 513.561392] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Skipping network cache update for instance because it is Building. {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10414}} [ 513.561670] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Skipping network cache update for instance because it is Building. {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10414}} [ 513.561825] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Didn't find any instances for network info cache update. {{(pid=61999) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10487}} [ 513.561964] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.563016] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.563016] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.563016] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.563247] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.563810] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.564022] env[61999]: DEBUG nova.compute.manager [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61999) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11020}} [ 513.564159] env[61999]: DEBUG oslo_service.periodic_task [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Running periodic task ComputeManager.update_available_resource {{(pid=61999) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.647201] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043181, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024799} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 513.647982] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] File moved {{(pid=61999) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 513.647982] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Cleaning up location [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741 {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 513.648741] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore2] vmware_temp/30366221-e5ae-4cec-925a-ad81cc83c741 {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 513.649116] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc5cdade-3e16-4c87-8e26-d7417ef6111d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.658486] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 513.658486] env[61999]: value = "task-1043182" [ 513.658486] env[61999]: _type = "Task" [ 513.658486] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 513.674064] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.706097] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 513.751158] env[61999]: DEBUG nova.compute.manager [req-037b1cd1-959c-4275-80b1-fd427ebefaa8 req-f9369647-683a-4605-915e-a9d1c494f9f0 service nova] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Received event network-vif-deleted-d8e432a4-6e78-4bea-9ce8-5d3e18da0476 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 513.759029] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 513.759384] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 513.759515] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 513.759727] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 513.760096] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 513.761023] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 513.761350] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 513.761532] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 513.761735] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 513.761951] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 513.762172] env[61999]: DEBUG nova.virt.hardware [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 513.764618] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3071b171-5adb-47c9-91c1-1522924de0ca {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.776875] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75465cbe-5bbd-48c9-9950-5b51535f6848 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.929625] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 513.931029] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 513.935604] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.807s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.939071] env[61999]: INFO nova.compute.claims [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.067553] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.172809] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024467} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 514.173073] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 514.173821] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58800f5e-6954-4df4-a3ea-e9202fc5e9d5 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.179384] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 514.179384] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]525884cb-59ae-ca91-58cf-c430e36e4ea1" [ 514.179384] env[61999]: _type = "Task" [ 514.179384] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 514.188819] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]525884cb-59ae-ca91-58cf-c430e36e4ea1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 514.440150] env[61999]: DEBUG nova.compute.utils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 514.442730] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 514.442730] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 514.586926] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Successfully created port: f5a48c9e-a997-40fc-89d8-27d5dc7820e7 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 514.690232] env[61999]: DEBUG nova.policy [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8041dabbe7543c9aea408f2e683e9d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c35acc702f3f437d84acbab933c833ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 514.700881] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]525884cb-59ae-ca91-58cf-c430e36e4ea1, 'name': SearchDatastore_Task, 'duration_secs': 0.008166} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 514.701208] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 514.701526] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 514.701795] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7b06ba2-9318-4ea6-a3d0-5cd722a567e8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.712338] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 514.712338] env[61999]: value = "task-1043183" [ 514.712338] env[61999]: _type = "Task" [ 514.712338] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 514.729136] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 514.949439] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 515.106076] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6282ccc-1072-42ae-850f-c0a0031a5f8c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.114457] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86b6f4c-6c59-47f9-b7b9-ee40adf3cc6d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.150475] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16238936-350a-4804-abb0-87bb5a0cf289 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.158583] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa5661b-3508-4d06-b9d2-f06c5e85d81a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.175033] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.223077] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497976} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 515.223356] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 515.223571] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extending root virtual disk to 1048576 {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 515.223817] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aadf3302-499e-4aa0-b354-c12ab95b5543 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.232779] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 515.232779] env[61999]: value = "task-1043184" [ 515.232779] env[61999]: _type = "Task" [ 515.232779] env[61999]: } to complete. 
{{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 515.242518] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 515.304851] env[61999]: ERROR nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. [ 515.304851] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.304851] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.304851] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.304851] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.304851] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.304851] env[61999]: ERROR nova.compute.manager raise self.value [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.304851] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 515.304851] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.304851] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 515.305357] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.305357] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 515.305357] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. 
[ 515.305357] env[61999]: ERROR nova.compute.manager [ 515.305357] env[61999]: Traceback (most recent call last): [ 515.305357] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 515.305357] env[61999]: listener.cb(fileno) [ 515.305357] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.305357] env[61999]: result = function(*args, **kwargs) [ 515.305357] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.305357] env[61999]: return func(*args, **kwargs) [ 515.305357] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.305357] env[61999]: raise e [ 515.305357] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.305357] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 515.305357] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.305357] env[61999]: created_port_ids = self._update_ports_for_instance( [ 515.305357] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.305357] env[61999]: with excutils.save_and_reraise_exception(): [ 515.305357] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.305357] env[61999]: self.force_reraise() [ 515.305357] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.305357] env[61999]: raise self.value [ 515.305357] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.305357] env[61999]: updated_port = self._update_port( [ 515.305357] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.305357] env[61999]: _ensure_no_port_binding_failure(port) [ 515.305357] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.305357] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 515.306127] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. [ 515.306127] env[61999]: Removing descriptor: 15 [ 515.306127] env[61999]: ERROR nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. 
[ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Traceback (most recent call last): [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] yield resources [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.driver.spawn(context, instance, image_meta, [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 515.306127] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] vm_ref = self.build_virtual_machine(instance, [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] vif_infos = vmwarevif.get_vif_info(self._session, [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] for vif in network_info: [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self._sync_wrapper(fn, *args, **kwargs) [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.wait() [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self[:] = self._gt.wait() [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self._exit_event.wait() [ 515.306720] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 515.307123] env[61999]: ERROR 
nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] result = hub.switch() [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self.greenlet.switch() [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] result = function(*args, **kwargs) [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return func(*args, **kwargs) [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise e [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] nwinfo = self.network_api.allocate_for_instance( [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.307123] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] created_port_ids = self._update_ports_for_instance( [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] with excutils.save_and_reraise_exception(): [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.force_reraise() [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise self.value [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] updated_port = self._update_port( [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.307453] 
env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] _ensure_no_port_binding_failure(port) [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.307453] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise exception.PortBindingFailed(port_id=port['id']) [ 515.307800] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. [ 515.307800] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] [ 515.307800] env[61999]: INFO nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Terminating instance [ 515.679175] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 515.747841] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.21142} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 515.748113] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extended root virtual disk {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 515.749329] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeedc6c7-1b03-481f-b566-6dd77df79349 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.800448] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 515.800448] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff4053de-e1f6-49cc-8a99-3d016d2449b4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.815865] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquiring lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.816109] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquired lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.816567] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 515.825390] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 515.825390] env[61999]: value = "task-1043185" [ 515.825390] env[61999]: _type = "Task" [ 515.825390] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 515.836762] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043185, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 515.961448] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 515.989086] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 515.989350] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 515.989751] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 515.989751] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 515.989863] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 515.991199] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 515.991199] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 515.991199] env[61999]: 
DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 515.991199] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 515.991199] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 515.991438] env[61999]: DEBUG nova.virt.hardware [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 515.992407] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b2a97c-fecf-41f5-a121-ac46b64b1c60 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.000910] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4d6314-0234-42fd-9c2d-5f615a3d22cc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.185486] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.186097] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 516.191017] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.487s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.198175] env[61999]: INFO nova.compute.claims [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.240852] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Successfully created port: f242c8f1-bf66-4285-afd0-eac259344df8 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.336233] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043185, 'name': ReconfigVM_Task, 'duration_secs': 0.313086} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 516.336233] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 516.336896] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74d4f0fa-1c6f-4cf4-9983-0788a1f65bc1 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.343698] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 516.343698] env[61999]: value = "task-1043186" [ 516.343698] env[61999]: _type = "Task" [ 516.343698] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 516.354491] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043186, 'name': Rename_Task} progress is 5%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 516.362041] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 516.449679] env[61999]: DEBUG nova.compute.manager [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Received event network-changed-006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 516.449888] env[61999]: DEBUG nova.compute.manager [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Refreshing instance network info cache due to event network-changed-006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 516.450154] env[61999]: DEBUG oslo_concurrency.lockutils [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] Acquiring lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 516.705138] env[61999]: DEBUG nova.compute.utils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 516.711035] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 516.711035] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 516.854421] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043186, 'name': Rename_Task, 'duration_secs': 0.129274} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 516.854765] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 516.855599] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.856802] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c992f97d-73e5-44bc-a668-6f014b2766f9 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.863955] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 516.863955] env[61999]: value = "task-1043187" [ 516.863955] env[61999]: _type = "Task" [ 516.863955] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 516.871878] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 517.008435] env[61999]: DEBUG nova.policy [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d3eec1984d9498b8420ede226bffd49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a27309c2142442328e9491b1feb18b76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 517.212914] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 517.362063] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Releasing lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.362063] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 517.362063] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 517.362063] env[61999]: DEBUG oslo_concurrency.lockutils [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] Acquired lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 517.362063] env[61999]: DEBUG nova.network.neutron [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Refreshing network info cache for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 517.366151] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0ca6915-03bf-48b8-a39a-fbd998f393e3 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.397254] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425c2d94-e2ab-415c-a9af-83cff2541083 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.417433] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c446852-7b0c-42d8-b28f-178d2dae3941 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.428546] env[61999]: DEBUG oslo_vmware.api [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043187, 'name': PowerOnVM_Task, 'duration_secs': 0.447049} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 517.433540] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 517.433540] env[61999]: INFO nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Took 8.31 seconds to spawn the instance on the hypervisor. [ 517.433540] env[61999]: DEBUG nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Checking state {{(pid=61999) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 517.436414] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752311f4-aab2-48fb-b860-8dfc543df463 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.439350] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 492a50f9-06bf-40b2-8746-1516c045b9b3 could not be found. [ 517.439546] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 517.439715] env[61999]: INFO nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Took 0.08 seconds to destroy the instance on the hypervisor. [ 517.439945] env[61999]: DEBUG oslo.service.loopingcall [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 517.441975] env[61999]: DEBUG nova.compute.manager [-] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 517.442101] env[61999]: DEBUG nova.network.neutron [-] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 517.446189] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e907520-c9d2-45b2-ab27-9c44d9ed8227 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.483871] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de77b20-e5ed-4297-acfc-2f93041322ee {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.497870] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e1d4e1-aaac-426f-8a02-546481352d66 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.509136] env[61999]: DEBUG nova.compute.provider_tree [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 517.575513] env[61999]: DEBUG nova.network.neutron [-] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.949258] env[61999]: DEBUG nova.network.neutron [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.959767] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquiring lock "75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.959890] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Lock "75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.969652] env[61999]: INFO nova.compute.manager [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Took 14.54 seconds to build instance. 
[ 518.011437] env[61999]: DEBUG nova.scheduler.client.report [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 518.079520] env[61999]: DEBUG nova.network.neutron [-] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.230253] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 518.263204] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 518.263598] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 518.263668] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 518.263917] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 518.264020] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 
tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 518.265896] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 518.265896] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 518.265896] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 518.265896] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 518.266105] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 518.266386] env[61999]: DEBUG nova.virt.hardware [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 518.268131] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27260f4a-1f11-47c1-8454-3b961e38090f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.281600] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496cb5e2-a422-4543-a598-f514f8b2c06c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.467683] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 518.471372] env[61999]: DEBUG oslo_concurrency.lockutils [None req-fddc1ac0-7cf9-4613-9c43-7471e61926c1 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.048s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.517752] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.518197] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Start building networks asynchronously for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 518.526700] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.289s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.582028] env[61999]: INFO nova.compute.manager [-] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Took 1.14 seconds to deallocate network for instance. 
[ 518.588076] env[61999]: DEBUG nova.compute.claims [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 518.588676] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.633417] env[61999]: DEBUG nova.network.neutron [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.996994] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.034066] env[61999]: DEBUG nova.compute.utils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.044869] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Allocating IP information in the background. 
{{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 519.045157] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 519.140470] env[61999]: DEBUG oslo_concurrency.lockutils [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] Releasing lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.140470] env[61999]: DEBUG nova.compute.manager [req-cd807c05-1aab-406b-9bb6-b593adefa3a5 req-509a772f-1daa-4a95-a749-454123a5ff5a service nova] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Received event network-vif-deleted-006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 519.169440] env[61999]: DEBUG nova.policy [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5066754a59584f1e873a6c7e666d0d7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '691059cc50384d9faf7c86a5721d412a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.206759] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a0b6ec-2b37-4b76-9f30-b4ee265d8a0a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.226709] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0f84a9-5785-4f2f-bc77-7e008874edbc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.263434] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebabb0c8-f931-41e7-9a3d-0192dd8535bf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.270711] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d573153-cb4d-421b-bcea-884e142a7d63 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.284866] env[61999]: DEBUG nova.compute.provider_tree [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.391519] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 
tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Successfully created port: 1adcc44d-200e-4a63-9f0d-09ff6428bad7 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.544472] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Start building block device mappings for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 519.789551] env[61999]: DEBUG nova.scheduler.client.report [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 520.216497] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Acquiring lock "fdb42889-b8ef-4a8a-a7f4-17397f29abd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.216497] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Lock "fdb42889-b8ef-4a8a-a7f4-17397f29abd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.295574] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.768s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.295574] env[61999]: ERROR nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. 
[ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Traceback (most recent call last): [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.driver.spawn(context, instance, image_meta, [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 520.295574] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] vm_ref = self.build_virtual_machine(instance, [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] vif_infos = vmwarevif.get_vif_info(self._session, [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] for vif in network_info: [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self._sync_wrapper(fn, *args, **kwargs) [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.wait() [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self[:] = self._gt.wait() [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self._exit_event.wait() [ 520.295960] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] result = hub.switch() [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return self.greenlet.switch() [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] result = function(*args, **kwargs) [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] return func(*args, **kwargs) [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise e [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] nwinfo = self.network_api.allocate_for_instance( [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.296317] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] created_port_ids = self._update_ports_for_instance( [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] with excutils.save_and_reraise_exception(): [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] self.force_reraise() [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise self.value [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] updated_port = self._update_port( [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] _ensure_no_port_binding_failure(port) [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 520.297850] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] raise exception.PortBindingFailed(port_id=port['id']) [ 520.298180] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] nova.exception.PortBindingFailed: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. [ 520.298180] env[61999]: ERROR nova.compute.manager [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] [ 520.298180] env[61999]: DEBUG nova.compute.utils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 520.298180] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.231s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.298350] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.303025] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61999) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 520.303025] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.712s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.303679] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de172d71-be72-4a07-bb48-ce1f7724de86 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.308870] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Build of instance 77195a41-5390-4d50-b9e9-43f4e586fe2e was re-scheduled: Binding failed for port d8e432a4-6e78-4bea-9ce8-5d3e18da0476, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 520.309721] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 520.312018] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 520.312018] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquired lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 520.312018] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 520.315648] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f227f2-5681-435a-8672-465b5c7ee733 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.340895] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d1055d-09f8-4a0d-ae39-e0ebe4f75ea4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.352976] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42317d93-a209-4a44-9136-64f4237c3bc2 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.394202] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181600MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61999) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 520.394265] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.562164] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 520.586366] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 520.586632] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.586787] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 520.586964] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.587122] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 520.587268] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 520.587469] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 520.587624] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 520.587787] 
env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 520.587945] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 520.588492] env[61999]: DEBUG nova.virt.hardware [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 520.589428] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e71be40-817e-4f7f-bf0c-b7fd88a51249 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.597440] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690f28cc-9381-4c75-a2af-a2c4541dd53f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.718533] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.941017] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d66c23c-1037-402a-8c87-2d652b56c66e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.949078] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f9720c-fb76-423e-8bea-df84d0beb944 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.953012] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 520.987528] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2a5a55-8445-4b21-bc99-da90f7fe8e61 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.998420] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7edacf3-4325-42d0-b5a8-4a3e2c703594 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.014142] env[61999]: DEBUG nova.compute.provider_tree [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.026115] env[61999]: ERROR nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. [ 521.026115] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 521.026115] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.026115] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.026115] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.026115] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.026115] env[61999]: ERROR nova.compute.manager raise self.value [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.026115] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.026115] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.026115] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.027105] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.027105] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.027105] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, 
please check neutron logs for more information. [ 521.027105] env[61999]: ERROR nova.compute.manager [ 521.027105] env[61999]: Traceback (most recent call last): [ 521.027105] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.027105] env[61999]: listener.cb(fileno) [ 521.027105] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.027105] env[61999]: result = function(*args, **kwargs) [ 521.027105] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 521.027105] env[61999]: return func(*args, **kwargs) [ 521.027105] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 521.027105] env[61999]: raise e [ 521.027105] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 521.027105] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 521.027105] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.027105] env[61999]: created_port_ids = self._update_ports_for_instance( [ 521.027105] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.027105] env[61999]: with excutils.save_and_reraise_exception(): [ 521.027105] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.027105] env[61999]: self.force_reraise() [ 521.027105] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.027105] env[61999]: raise self.value [ 521.027105] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.027105] env[61999]: updated_port = self._update_port( [ 521.027105] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.027105] env[61999]: _ensure_no_port_binding_failure(port) [ 521.027105] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.027105] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.028376] env[61999]: nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. [ 521.028376] env[61999]: Removing descriptor: 17 [ 521.028376] env[61999]: ERROR nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. 
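The PortBindingFailed raised above comes from _ensure_no_port_binding_failure at nova/network/neutron.py:294 in this deployment. Below is a minimal sketch of that check, reconstructed only from what the traceback shows; the 'binding:vif_type' field and the 'binding_failed' value follow the usual Neutron port-binding convention and are assumptions here, not a verbatim copy of the Nova source.

class PortBindingFailed(Exception):
    # Mirrors the message format seen in the log records above.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron marks a port whose binding could not be completed by setting
    # binding:vif_type to 'binding_failed' (assumed convention). Nova turns
    # that into a hard error so the build is aborted and rescheduled instead
    # of spawning a VM with an unusable VIF.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

The "Instance failed to spawn" records that follow are this same exception surfacing again through the network_info sync wrapper inside the vmwareapi spawn path.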
[ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Traceback (most recent call last): [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] yield resources [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.driver.spawn(context, instance, image_meta, [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self._vmops.spawn(context, instance, image_meta, injected_files, [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 521.028376] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] vm_ref = self.build_virtual_machine(instance, [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] vif_infos = vmwarevif.get_vif_info(self._session, [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] for vif in network_info: [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self._sync_wrapper(fn, *args, **kwargs) [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.wait() [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self[:] = self._gt.wait() [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self._exit_event.wait() [ 521.028929] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 521.029499] env[61999]: ERROR 
nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] result = hub.switch() [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self.greenlet.switch() [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] result = function(*args, **kwargs) [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return func(*args, **kwargs) [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise e [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] nwinfo = self.network_api.allocate_for_instance( [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.029499] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] created_port_ids = self._update_ports_for_instance( [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] with excutils.save_and_reraise_exception(): [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.force_reraise() [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise self.value [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] updated_port = self._update_port( [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.030064] 
env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] _ensure_no_port_binding_failure(port) [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.030064] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise exception.PortBindingFailed(port_id=port['id']) [ 521.030479] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. [ 521.030479] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] [ 521.030479] env[61999]: INFO nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Terminating instance [ 521.240603] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.266107] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.338893] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Successfully created port: 2b139f42-4096-4b41-9635-757e2d880205 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.517753] env[61999]: DEBUG nova.scheduler.client.report [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 521.531120] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquiring lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.531469] env[61999]: DEBUG oslo_concurrency.lockutils [None 
req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquired lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.531553] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 521.768407] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Releasing lock "refresh_cache-77195a41-5390-4d50-b9e9-43f4e586fe2e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 521.768690] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 521.768690] env[61999]: DEBUG nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 521.768900] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 521.773019] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Acquiring lock "410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.773249] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Lock "410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.850852] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.022986] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.723s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.023656] env[61999]: ERROR nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Traceback (most recent call last): [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.driver.spawn(context, instance, image_meta, [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] vm_ref = self.build_virtual_machine(instance, [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] vif_infos = vmwarevif.get_vif_info(self._session, [ 522.023656] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] for vif in network_info: [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self._sync_wrapper(fn, *args, **kwargs) [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.wait() [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self[:] = self._gt.wait() [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self._exit_event.wait() [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] result = hub.switch() [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 522.024044] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return self.greenlet.switch() [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] result = function(*args, **kwargs) [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] return func(*args, **kwargs) [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise e [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] nwinfo = self.network_api.allocate_for_instance( [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] created_port_ids = self._update_ports_for_instance( [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] with excutils.save_and_reraise_exception(): [ 522.024619] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] self.force_reraise() [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise self.value [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] updated_port = self._update_port( [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] _ensure_no_port_binding_failure(port) [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] raise exception.PortBindingFailed(port_id=port['id']) [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] nova.exception.PortBindingFailed: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. [ 522.024989] env[61999]: ERROR nova.compute.manager [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] [ 522.025578] env[61999]: DEBUG nova.compute.utils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 522.025578] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.029s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.028866] env[61999]: INFO nova.compute.claims [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.032019] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Build of instance 492a50f9-06bf-40b2-8746-1516c045b9b3 was re-scheduled: Binding failed for port 006a7bdf-1aa3-4f3a-9aa6-dca0c2892e8e, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 522.032095] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 522.032615] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquiring lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.032615] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Acquired lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.035831] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 522.103294] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.276237] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 522.355596] env[61999]: DEBUG nova.network.neutron [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.380490] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Acquiring lock "4736bfe3-2a45-4fd8-8777-9ab1d2800197" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.380821] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Lock "4736bfe3-2a45-4fd8-8777-9ab1d2800197" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.397969] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.663600] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.812615] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.858954] env[61999]: INFO nova.compute.manager [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] [instance: 77195a41-5390-4d50-b9e9-43f4e586fe2e] Took 1.09 seconds to deallocate network for instance. [ 522.884871] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 522.903194] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Releasing lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.903194] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 522.903194] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 522.903194] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a8ebd56-41d0-4460-804c-c7dc63e9cd93 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.911209] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167b05c4-4094-4c23-8e4b-d870a3b149d5 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.937294] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181 could not be found. [ 522.937537] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 522.937716] env[61999]: INFO nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Took 0.04 seconds to destroy the instance on the hypervisor. [ 522.937949] env[61999]: DEBUG oslo.service.loopingcall [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.938509] env[61999]: DEBUG nova.compute.manager [-] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 522.938644] env[61999]: DEBUG nova.network.neutron [-] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 522.942019] env[61999]: INFO nova.compute.manager [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Rebuilding instance [ 522.984957] env[61999]: DEBUG nova.network.neutron [-] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.009189] env[61999]: DEBUG nova.compute.manager [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Checking state {{(pid=61999) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 523.010126] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1642d2af-32f7-48f8-a40e-02b21d8320f8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.094400] env[61999]: DEBUG nova.compute.manager [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Received event network-changed-f5a48c9e-a997-40fc-89d8-27d5dc7820e7 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 523.094592] env[61999]: DEBUG nova.compute.manager [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Refreshing instance network info cache due to event network-changed-f5a48c9e-a997-40fc-89d8-27d5dc7820e7. 
{{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 523.094828] env[61999]: DEBUG oslo_concurrency.lockutils [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] Acquiring lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.095059] env[61999]: DEBUG oslo_concurrency.lockutils [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] Acquired lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.095262] env[61999]: DEBUG nova.network.neutron [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Refreshing network info cache for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 523.107526] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.240171] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7aed95-8b1c-4aa6-b011-0f611688489b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.248935] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4103f498-7bb4-4c7f-8521-538c3561c708 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.281773] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44c2a90-68e6-4507-b4f1-0bd678ba9d59 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.289789] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93e127-dbbf-4341-9638-25d066278ede {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.303263] env[61999]: DEBUG nova.compute.provider_tree [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 523.388538] env[61999]: ERROR nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. 
[ 523.388538] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.388538] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.388538] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.388538] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.388538] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.388538] env[61999]: ERROR nova.compute.manager raise self.value [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.388538] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.388538] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.388538] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.388993] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.388993] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.388993] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. 
[ 523.388993] env[61999]: ERROR nova.compute.manager [ 523.388993] env[61999]: Traceback (most recent call last): [ 523.388993] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.388993] env[61999]: listener.cb(fileno) [ 523.388993] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.388993] env[61999]: result = function(*args, **kwargs) [ 523.388993] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.388993] env[61999]: return func(*args, **kwargs) [ 523.388993] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.388993] env[61999]: raise e [ 523.388993] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.388993] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 523.388993] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.388993] env[61999]: created_port_ids = self._update_ports_for_instance( [ 523.388993] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.388993] env[61999]: with excutils.save_and_reraise_exception(): [ 523.388993] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.388993] env[61999]: self.force_reraise() [ 523.388993] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.388993] env[61999]: raise self.value [ 523.388993] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.388993] env[61999]: updated_port = self._update_port( [ 523.388993] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.388993] env[61999]: _ensure_no_port_binding_failure(port) [ 523.388993] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.388993] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.389785] env[61999]: nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. [ 523.389785] env[61999]: Removing descriptor: 16 [ 523.389785] env[61999]: ERROR nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. 
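When one of these "please check neutron logs" failures needs follow-up, the binding details of the offending port can be read back directly. A diagnostic sketch using openstacksdk (not part of the log): the cloud profile name 'devstack-admin' is an assumption, the port UUID is the one from the PortBindingFailed record above, and on a CI run like this the port may already have been cleaned up, in which case get_port raises ResourceNotFound.

import openstack

# Connect via a clouds.yaml profile; 'devstack-admin' is an assumed name.
conn = openstack.connect(cloud='devstack-admin')

# UUID taken from the failure above.
port = conn.network.get_port('f242c8f1-bf66-4285-afd0-eac259344df8')

# binding_vif_type reads 'binding_failed' when no ML2 mechanism driver could
# bind the port; binding_host_id shows which compute host the binding targeted.
print(port.binding_vif_type, port.binding_host_id, port.status)

The same fields are visible from the CLI with: openstack port show f242c8f1-bf66-4285-afd0-eac259344df8 -c binding_vif_type -c binding_host_id -c status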
[ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Traceback (most recent call last): [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] yield resources [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.driver.spawn(context, instance, image_meta, [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.389785] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] vm_ref = self.build_virtual_machine(instance, [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] for vif in network_info: [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self._sync_wrapper(fn, *args, **kwargs) [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.wait() [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self[:] = self._gt.wait() [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self._exit_event.wait() [ 523.390105] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.390428] env[61999]: ERROR 
nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] result = hub.switch() [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self.greenlet.switch() [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] result = function(*args, **kwargs) [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return func(*args, **kwargs) [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise e [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] nwinfo = self.network_api.allocate_for_instance( [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.390428] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] created_port_ids = self._update_ports_for_instance( [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] with excutils.save_and_reraise_exception(): [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.force_reraise() [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise self.value [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] updated_port = self._update_port( [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.390950] 
env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] _ensure_no_port_binding_failure(port) [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.390950] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise exception.PortBindingFailed(port_id=port['id']) [ 523.391260] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. [ 523.391260] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] [ 523.391260] env[61999]: INFO nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Terminating instance [ 523.421307] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.490165] env[61999]: DEBUG nova.network.neutron [-] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.613905] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Releasing lock "refresh_cache-492a50f9-06bf-40b2-8746-1516c045b9b3" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.613905] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 523.613905] env[61999]: DEBUG nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 523.613905] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 523.673264] env[61999]: DEBUG nova.network.neutron [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.685019] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.808202] env[61999]: DEBUG nova.scheduler.client.report [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 523.902473] env[61999]: INFO nova.scheduler.client.report [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Deleted allocations for instance 77195a41-5390-4d50-b9e9-43f4e586fe2e [ 523.909111] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.909589] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquired lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.909684] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] 
[instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 523.922082] env[61999]: DEBUG nova.compute.manager [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Received event network-changed-f242c8f1-bf66-4285-afd0-eac259344df8 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 523.922082] env[61999]: DEBUG nova.compute.manager [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Refreshing instance network info cache due to event network-changed-f242c8f1-bf66-4285-afd0-eac259344df8. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 523.922082] env[61999]: DEBUG oslo_concurrency.lockutils [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] Acquiring lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.926088] env[61999]: DEBUG nova.network.neutron [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.995029] env[61999]: INFO nova.compute.manager [-] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Took 1.06 seconds to deallocate network for instance. [ 523.997653] env[61999]: DEBUG nova.compute.claims [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 523.997828] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.032780] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 524.033942] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dc54716-e23e-4938-87a3-4a84698d67a4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.043044] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 524.043044] env[61999]: value = "task-1043188" [ 524.043044] env[61999]: _type = "Task" [ 524.043044] env[61999]: } to complete. 
{{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.055779] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043188, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.188036] env[61999]: DEBUG nova.network.neutron [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.313947] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.288s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.314546] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Start building networks asynchronously for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 524.317424] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.923s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.419383] env[61999]: DEBUG oslo_concurrency.lockutils [None req-63b832cd-4933-4bdd-9ac7-aab41cf3706f tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "77195a41-5390-4d50-b9e9-43f4e586fe2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.824s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.427432] env[61999]: DEBUG oslo_concurrency.lockutils [req-5bd158d6-1989-406a-b64c-0b8f7702754d req-26e89bf7-ab34-4eff-8745-fc7316769632 service nova] Releasing lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.469521] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.554164] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043188, 'name': PowerOffVM_Task, 'duration_secs': 0.123304} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 524.554467] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 524.554782] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 524.555497] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f19b758-6684-44b8-aa64-ce96bd97a8ba {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.562451] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistering the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 524.562679] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f02c42fb-93e1-44b6-8bfd-89f523abad76 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.587035] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistered the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 524.587240] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleting contents of the VM from datastore datastore2 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 524.587436] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5 {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 524.587687] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f616765-2e67-46d3-a54a-996fb7618c49 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.595347] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 524.595347] env[61999]: value = "task-1043190" [ 524.595347] env[61999]: _type = "Task" [ 524.595347] env[61999]: } to complete. 
{{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.603991] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.691396] env[61999]: INFO nova.compute.manager [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] [instance: 492a50f9-06bf-40b2-8746-1516c045b9b3] Took 1.08 seconds to deallocate network for instance. [ 524.727840] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.825185] env[61999]: DEBUG nova.compute.utils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 524.829893] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 524.829893] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 525.109154] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089951} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.111150] env[61999]: DEBUG nova.policy [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa9abb01af9449aa2999c135f2cde03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3d26f736526425c9d7a6efb9dfbdc41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 525.113361] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 525.113582] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleted contents of the VM from datastore datastore2 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 525.113802] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 525.230788] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Releasing lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.231347] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 525.231477] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 525.231781] env[61999]: DEBUG oslo_concurrency.lockutils [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] Acquired lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.231943] env[61999]: DEBUG nova.network.neutron [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Refreshing network info cache for port f242c8f1-bf66-4285-afd0-eac259344df8 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 525.233055] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52153bb6-059b-48bf-956a-d0a104e0d9e0 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.255855] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb7dfe9-a70a-4298-bab8-080539db1c7e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.281300] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4 could not be found. [ 525.281555] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 525.282496] env[61999]: INFO nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 525.282496] env[61999]: DEBUG oslo.service.loopingcall [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 525.282496] env[61999]: DEBUG nova.compute.manager [-] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 525.282496] env[61999]: DEBUG nova.network.neutron [-] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 525.331264] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Start building block device mappings for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 525.343677] env[61999]: DEBUG nova.network.neutron [-] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.372840] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.656622] env[61999]: ERROR nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
[ 525.656622] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 525.656622] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 525.656622] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 525.656622] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.656622] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.656622] env[61999]: ERROR nova.compute.manager raise self.value [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 525.656622] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 525.656622] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.656622] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 525.657367] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.657367] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 525.657367] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
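Both traceback dumps around this point end the same way: Neutron reports a failed binding for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, _ensure_no_port_binding_failure() in nova/network/neutron.py raises PortBindingFailed, and oslo_utils.excutils.save_and_reraise_exception() re-raises that original error once its with-block unwinds (the force_reraise frames above). A minimal, self-contained sketch of that pattern follows; it is an illustration of the mechanism visible in the traceback, not Nova's actual code, and everything other than the oslo_utils call and the 'binding_failed' vif_type convention is a hypothetical stand-in.

    import logging

    from oslo_utils import excutils  # the oslo.utils helper seen in the traceback above

    LOG = logging.getLogger(__name__)


    class PortBindingFailed(Exception):
        """Hypothetical stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with
        # vif_type 'binding_failed'; spawning has to stop at that point
        # rather than fail later on the hypervisor.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_ports_for_instance(ports, update_port):
        # Illustrative loop over an instance's requested ports.  If any
        # update fails, log inside save_and_reraise_exception() so the
        # original exception, not a secondary cleanup error, propagates.
        created_port_ids = []
        for port in ports:
            try:
                updated = update_port(port)
                ensure_no_port_binding_failure(updated)
                created_port_ids.append(updated['id'])
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.error("Failed to update/bind port %s", port.get('id'))
        return created_port_ids

Under that check the port in this dump trips the failure path immediately, which is consistent with the Terminating instance entry for instance 91c68a8b-8a6b-4e31-8b76-4da94fa66748 a few records further down.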
[ 525.657367] env[61999]: ERROR nova.compute.manager [ 525.657367] env[61999]: Traceback (most recent call last): [ 525.657367] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 525.657367] env[61999]: listener.cb(fileno) [ 525.657367] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 525.657367] env[61999]: result = function(*args, **kwargs) [ 525.657367] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 525.657367] env[61999]: return func(*args, **kwargs) [ 525.657367] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 525.657367] env[61999]: raise e [ 525.657367] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 525.657367] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 525.657367] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 525.657367] env[61999]: created_port_ids = self._update_ports_for_instance( [ 525.657367] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 525.657367] env[61999]: with excutils.save_and_reraise_exception(): [ 525.657367] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.657367] env[61999]: self.force_reraise() [ 525.657367] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.657367] env[61999]: raise self.value [ 525.657367] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 525.657367] env[61999]: updated_port = self._update_port( [ 525.657367] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.657367] env[61999]: _ensure_no_port_binding_failure(port) [ 525.657367] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.657367] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 525.658091] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. [ 525.658091] env[61999]: Removing descriptor: 18 [ 525.658091] env[61999]: ERROR nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
[ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Traceback (most recent call last): [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] yield resources [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.driver.spawn(context, instance, image_meta, [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self._vmops.spawn(context, instance, image_meta, injected_files, [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 525.658091] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] vm_ref = self.build_virtual_machine(instance, [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] vif_infos = vmwarevif.get_vif_info(self._session, [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] for vif in network_info: [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self._sync_wrapper(fn, *args, **kwargs) [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.wait() [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self[:] = self._gt.wait() [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self._exit_event.wait() [ 525.658510] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 525.658845] env[61999]: ERROR 
nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] result = hub.switch() [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self.greenlet.switch() [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] result = function(*args, **kwargs) [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return func(*args, **kwargs) [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise e [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] nwinfo = self.network_api.allocate_for_instance( [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 525.658845] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] created_port_ids = self._update_ports_for_instance( [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] with excutils.save_and_reraise_exception(): [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.force_reraise() [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise self.value [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] updated_port = self._update_port( [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.659186] 
env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] _ensure_no_port_binding_failure(port) [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.659186] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise exception.PortBindingFailed(port_id=port['id']) [ 525.659498] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. [ 525.659498] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] [ 525.659498] env[61999]: INFO nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Terminating instance [ 525.752477] env[61999]: INFO nova.scheduler.client.report [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Deleted allocations for instance 492a50f9-06bf-40b2-8746-1516c045b9b3 [ 525.790786] env[61999]: DEBUG nova.network.neutron [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.846105] env[61999]: DEBUG nova.network.neutron [-] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.875937] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 492a50f9-06bf-40b2-8746-1516c045b9b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 525.875937] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.876130] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.876163] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 91c68a8b-8a6b-4e31-8b76-4da94fa66748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.876661] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance bf8cf38f-4305-4f7b-a262-e3e7e863f3d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.876661] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 525.941337] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Acquiring lock "491c456d-5e90-46ff-80cf-6cda1d8f657a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.941570] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Lock "491c456d-5e90-46ff-80cf-6cda1d8f657a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.036179] env[61999]: DEBUG nova.network.neutron [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.165437] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquiring lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.165608] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquired lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.165786] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 
tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 526.180725] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 526.180851] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.181018] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 526.181203] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.181347] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 526.182029] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 526.182029] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 526.182029] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 526.182029] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 526.182341] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 526.182389] env[61999]: DEBUG nova.virt.hardware [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 526.183323] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85557c93-2ded-48b3-a34a-02e274a812dd {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.194344] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b7e0e5-3177-413b-a1e5-075951b3a72d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.209985] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance VIF info [] {{(pid=61999) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 526.216517] env[61999]: DEBUG oslo.service.loopingcall [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 526.220019] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Creating VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 526.220019] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f80b2ca1-4790-4a73-8930-f709e23b8e45 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.237287] env[61999]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 526.237287] env[61999]: value = "task-1043191" [ 526.237287] env[61999]: _type = "Task" [ 526.237287] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.246605] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043191, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.261884] env[61999]: DEBUG oslo_concurrency.lockutils [None req-1dd89e5c-a529-49db-83ac-1446140fce47 tempest-ServerAddressesNegativeTestJSON-2036111769 tempest-ServerAddressesNegativeTestJSON-2036111769-project-member] Lock "492a50f9-06bf-40b2-8746-1516c045b9b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.009s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.342563] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 526.350337] env[61999]: INFO nova.compute.manager [-] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Took 1.07 seconds to deallocate network for instance. [ 526.354490] env[61999]: DEBUG nova.compute.claims [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 526.354754] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.371599] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 526.372008] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.372249] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Image limits 0:0:0 {{(pid=61999) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 526.374320] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.376042] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 526.376042] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 526.376042] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 526.376042] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 526.376042] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 526.377010] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 526.377010] env[61999]: DEBUG nova.virt.hardware [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 526.377371] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf1c929-04b7-413f-be62-91c5b498d908 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.384248] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance fdb42889-b8ef-4a8a-a7f4-17397f29abd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 526.396894] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7794c6-c757-4e4c-9fb3-79e1d33f11a2 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.445376] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 526.540885] env[61999]: DEBUG oslo_concurrency.lockutils [req-a8c3417c-fb40-4f81-8692-0a245ee20d35 req-0f5550d8-8388-41c4-a8eb-f4f12c5cff46 service nova] Releasing lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.674953] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Successfully created port: cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.739987] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 526.753353] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043191, 'name': CreateVM_Task, 'duration_secs': 0.3067} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.754096] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Created VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 526.757913] env[61999]: DEBUG oslo_vmware.service [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096bf9fa-a67a-4155-ae5f-488f1eb4c65a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.767216] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.767216] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.767788] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 526.767788] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43f76f41-6b11-4ff4-a2c0-ab26fb3815ea {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.773600] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 526.773600] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]52535248-a3ee-f3f8-55b3-ab29b81e9173" [ 526.773600] env[61999]: _type = "Task" [ 526.773600] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.782335] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]52535248-a3ee-f3f8-55b3-ab29b81e9173, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.902561] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 526.958020] env[61999]: ERROR nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. [ 526.958020] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 526.958020] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.958020] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.958020] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.958020] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.958020] env[61999]: ERROR nova.compute.manager raise self.value [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.958020] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 526.958020] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.958020] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 526.958656] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.958656] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 526.958656] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. 
[ 526.958656] env[61999]: ERROR nova.compute.manager [ 526.962270] env[61999]: Traceback (most recent call last): [ 526.962270] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 526.962270] env[61999]: listener.cb(fileno) [ 526.962270] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.962270] env[61999]: result = function(*args, **kwargs) [ 526.962270] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.962270] env[61999]: return func(*args, **kwargs) [ 526.962270] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 526.962270] env[61999]: raise e [ 526.962270] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 526.962270] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 526.962270] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.962270] env[61999]: created_port_ids = self._update_ports_for_instance( [ 526.962270] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.962270] env[61999]: with excutils.save_and_reraise_exception(): [ 526.962270] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.962270] env[61999]: self.force_reraise() [ 526.962270] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.962270] env[61999]: raise self.value [ 526.962782] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.962782] env[61999]: updated_port = self._update_port( [ 526.962782] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.962782] env[61999]: _ensure_no_port_binding_failure(port) [ 526.962782] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.962782] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 526.962782] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. [ 526.962782] env[61999]: Removing descriptor: 15 [ 526.963084] env[61999]: ERROR nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. 
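The PortBindingFailed errors above are raised when Nova inspects the binding state Neutron reports on a port before wiring it into the VM. A minimal sketch of that kind of check, using a plain dict and a stand-in exception class rather than Nova's actual objects:

# Hedged sketch: not Nova's code, just the shape of the binding check being logged.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with vif_type 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

# Example: a port Neutron failed to bind on this host.
port = {"id": "2b139f42-4096-4b41-9635-757e2d880205",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)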
[ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Traceback (most recent call last): [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] yield resources [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.driver.spawn(context, instance, image_meta, [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] vm_ref = self.build_virtual_machine(instance, [ 526.963084] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] for vif in network_info: [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self._sync_wrapper(fn, *args, **kwargs) [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.wait() [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self[:] = self._gt.wait() [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self._exit_event.wait() [ 526.963399] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.963399] env[61999]: ERROR 
nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] result = hub.switch() [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self.greenlet.switch() [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] result = function(*args, **kwargs) [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return func(*args, **kwargs) [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise e [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] nwinfo = self.network_api.allocate_for_instance( [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] created_port_ids = self._update_ports_for_instance( [ 526.963728] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] with excutils.save_and_reraise_exception(): [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.force_reraise() [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise self.value [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] updated_port = self._update_port( [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.964081] 
env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] _ensure_no_port_binding_failure(port) [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise exception.PortBindingFailed(port_id=port['id']) [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. [ 526.964081] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] [ 526.964424] env[61999]: INFO nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Terminating instance [ 526.989238] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.992681] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.089508] env[61999]: DEBUG nova.compute.manager [req-c5975dbc-0ce2-4850-83af-0c1150a05917 req-74b76004-06a0-4660-9382-563c4a89aafd service nova] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Received event network-vif-deleted-f5a48c9e-a997-40fc-89d8-27d5dc7820e7 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 527.287194] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.287535] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Processing image 9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 527.288170] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.288170] env[61999]: DEBUG 
oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.288279] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 527.289014] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e62d41f8-ea2f-4f2c-b434-0547f9044ce8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.307287] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 527.307475] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61999) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 527.308337] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a1d0d1-c27d-4103-ac9b-ecf05e59a0eb {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.316718] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9100cb94-5aa8-45d9-a6b8-d2ddf59f9dcb {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.328683] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 527.328683] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]52cbbc95-0d0c-580b-e95e-a500667b0915" [ 527.328683] env[61999]: _type = "Task" [ 527.328683] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.335602] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]52cbbc95-0d0c-580b-e95e-a500667b0915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.412375] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Instance 4736bfe3-2a45-4fd8-8777-9ab1d2800197 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61999) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 527.412647] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61999) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 527.412840] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61999) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 527.474762] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquiring lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.474948] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquired lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.475171] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 527.502480] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Releasing lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.502480] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 527.502480] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 527.503130] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3b1a477-f5bb-49e6-86c3-bdae31d74a70 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.516308] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f682d5-08df-403f-9ba1-fe2a9a147175 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.551906] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91c68a8b-8a6b-4e31-8b76-4da94fa66748 could not be found. [ 527.552566] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 527.553258] env[61999]: INFO nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Took 0.05 seconds to destroy the instance on the hypervisor. [ 527.553743] env[61999]: DEBUG oslo.service.loopingcall [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 527.558798] env[61999]: DEBUG nova.compute.manager [-] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 527.558798] env[61999]: DEBUG nova.network.neutron [-] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 527.599453] env[61999]: DEBUG nova.network.neutron [-] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.677104] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4a2960-d27d-4597-9b51-9f3ef6ac5424 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.688956] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddaf9e7b-7b61-48f7-b239-dbd5a24f7a32 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.694268] env[61999]: DEBUG nova.compute.manager [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Received event network-vif-deleted-f242c8f1-bf66-4285-afd0-eac259344df8 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 527.694577] env[61999]: DEBUG nova.compute.manager [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Received event network-changed-1adcc44d-200e-4a63-9f0d-09ff6428bad7 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 527.694767] env[61999]: DEBUG nova.compute.manager [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Refreshing instance network info cache due to event network-changed-1adcc44d-200e-4a63-9f0d-09ff6428bad7. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 527.695090] env[61999]: DEBUG oslo_concurrency.lockutils [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] Acquiring lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.695857] env[61999]: DEBUG oslo_concurrency.lockutils [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] Acquired lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.695857] env[61999]: DEBUG nova.network.neutron [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Refreshing network info cache for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 527.732227] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff38e76-ad72-4ce2-8c8d-d02ceb7a0c08 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.741663] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b02a48-1acd-4129-b326-4302ab8dedb5 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.758791] env[61999]: DEBUG nova.compute.provider_tree [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.783744] env[61999]: DEBUG oslo_concurrency.lockutils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Acquiring lock "92b4eec6-d38d-465e-a575-62f900400f7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.783971] env[61999]: DEBUG oslo_concurrency.lockutils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Lock "92b4eec6-d38d-465e-a575-62f900400f7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.842821] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Preparing fetch location {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 527.846032] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating directory with path [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 527.846032] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-643d48a1-2f34-4f18-b75d-8cae085d768d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.867748] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Created directory with path [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 527.867955] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Fetch image to [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 527.868132] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloading image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk on the data store datastore1 {{(pid=61999) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 527.869240] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b3af97fe-778d-4cd7-86ca-15a8854e5953 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.876993] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6261c9a7-033e-49c5-be0c-3d849a69d1d6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.887597] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36e3a93-444d-4085-ab63-54fc6b4bf35b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.922132] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9741580-7e7f-49d0-88d7-e243f38623c4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.930001] env[61999]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-490e68cc-114e-4faf-8f06-433860442ec5 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.956170] env[61999]: DEBUG nova.virt.vmwareapi.images [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloading image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to the data store datastore1 {{(pid=61999) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 527.995792] env[61999]: DEBUG oslo_concurrency.lockutils [None req-787d7f22-ed8c-4c06-9773-261aed97575a tempest-ServersTestManualDisk-2007130817 tempest-ServersTestManualDisk-2007130817-project-member] Acquiring lock "8e68085f-4bbd-480c-a925-e8714326ee9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.995792] env[61999]: DEBUG oslo_concurrency.lockutils [None req-787d7f22-ed8c-4c06-9773-261aed97575a tempest-ServersTestManualDisk-2007130817 tempest-ServersTestManualDisk-2007130817-project-member] Lock "8e68085f-4bbd-480c-a925-e8714326ee9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.002383] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.104847] env[61999]: DEBUG nova.network.neutron [-] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.138697] env[61999]: DEBUG oslo_vmware.rw_handles [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61999) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 528.256845] env[61999]: DEBUG nova.network.neutron [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.261303] env[61999]: DEBUG nova.scheduler.client.report [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 528.286345] env[61999]: DEBUG nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 528.388196] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.475337] env[61999]: DEBUG nova.network.neutron [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.609620] env[61999]: INFO nova.compute.manager [-] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Took 1.05 seconds to deallocate network for instance. 
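The inventory record reported above ("Inventory has not changed for provider ...") is what Placement uses to derive schedulable capacity. As a rough worked example, assuming the usual Placement convention of capacity = (total - reserved) * allocation_ratio (the formula itself is not stated in this log):

# Rough worked example of how the logged inventory translates into capacity.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0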
[ 528.615973] env[61999]: DEBUG nova.compute.claims [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 528.616237] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.767174] env[61999]: DEBUG nova.compute.resource_tracker [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61999) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 528.768028] env[61999]: DEBUG oslo_concurrency.lockutils [None req-f3342a45-98d6-4e73-8462-06ac648dfb8b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.450s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.768028] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.527s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.770854] env[61999]: INFO nova.compute.claims [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 528.809019] env[61999]: DEBUG oslo_vmware.rw_handles [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Completed reading data from the image iterator. {{(pid=61999) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 528.809019] env[61999]: DEBUG oslo_vmware.rw_handles [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61999) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 528.840731] env[61999]: DEBUG oslo_concurrency.lockutils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.898198] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Releasing lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.898198] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 528.898198] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 528.898198] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb155fc2-e9df-40a5-913c-9d8949b0750a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.909462] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0bb73d-6be3-4f77-b954-ec9d3a55e85e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.941659] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf8cf38f-4305-4f7b-a262-e3e7e863f3d1 could not be found. [ 528.941659] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 528.941659] env[61999]: INFO nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Took 0.04 seconds to destroy the instance on the hypervisor. 
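The Acquiring/Acquired/Releasing lock lines that run through this trace (e.g. "compute_resources", the refresh_cache-* and image-cache locks) come from oslo.concurrency's lockutils instrumentation. A minimal sketch of the same pattern, with an illustrative critical section:

# Minimal sketch of the lockutils usage behind the
# "Acquiring/Acquired/Releasing lock" DEBUG lines (body is illustrative).
from oslo_concurrency import lockutils

with lockutils.lock("compute_resources"):
    # critical section: e.g. claiming or aborting resources for an instance
    pass

# Decorator form: serializes every call to the function on the same lock.
@lockutils.synchronized("compute_resources")
def update_available_resource():
    pass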
[ 528.941659] env[61999]: DEBUG oslo.service.loopingcall [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.941659] env[61999]: DEBUG nova.compute.manager [-] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 528.941659] env[61999]: DEBUG nova.network.neutron [-] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 528.944914] env[61999]: DEBUG nova.virt.vmwareapi.images [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Downloaded image file data 9eed6d08-b529-4317-89cc-ae13b2d60cea to vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk on the data store datastore1 {{(pid=61999) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 528.946701] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Caching image {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 528.946929] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copying Virtual Disk [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk to [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 528.947196] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e783c38-99a2-4e1a-8f33-e075a45a34ac {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.957997] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 528.957997] env[61999]: value = "task-1043192" [ 528.957997] env[61999]: _type = "Task" [ 528.957997] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.974565] env[61999]: DEBUG nova.network.neutron [-] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.977017] env[61999]: DEBUG oslo_concurrency.lockutils [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] Releasing lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.977261] env[61999]: DEBUG nova.compute.manager [req-4f9939a7-e901-4899-a3a5-72a128fa3705 req-736bdf68-2667-4aa8-9667-17fd65998817 service nova] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Received event network-vif-deleted-1adcc44d-200e-4a63-9f0d-09ff6428bad7 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 528.983640] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.313453] env[61999]: ERROR nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 529.313453] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.313453] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.313453] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.313453] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.313453] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.313453] env[61999]: ERROR nova.compute.manager raise self.value [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.313453] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.313453] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.313453] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.313938] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.313938] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.313938] env[61999]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 529.313938] env[61999]: ERROR nova.compute.manager [ 529.313938] env[61999]: Traceback (most recent call last): [ 529.313938] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.313938] env[61999]: listener.cb(fileno) [ 529.313938] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.313938] env[61999]: result = function(*args, **kwargs) [ 529.313938] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.313938] env[61999]: return func(*args, **kwargs) [ 529.313938] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 529.313938] env[61999]: raise e [ 529.313938] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.313938] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 529.313938] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.313938] env[61999]: created_port_ids = self._update_ports_for_instance( [ 529.313938] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.313938] env[61999]: with excutils.save_and_reraise_exception(): [ 529.313938] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.313938] env[61999]: self.force_reraise() [ 529.313938] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.313938] env[61999]: raise self.value [ 529.313938] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.313938] env[61999]: updated_port = self._update_port( [ 529.313938] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.313938] env[61999]: _ensure_no_port_binding_failure(port) [ 529.313938] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.313938] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.314787] env[61999]: nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 529.314787] env[61999]: Removing descriptor: 16 [ 529.314787] env[61999]: ERROR nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. 
[ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Traceback (most recent call last): [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] yield resources [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.driver.spawn(context, instance, image_meta, [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.314787] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] vm_ref = self.build_virtual_machine(instance, [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] for vif in network_info: [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self._sync_wrapper(fn, *args, **kwargs) [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.wait() [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self[:] = self._gt.wait() [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self._exit_event.wait() [ 529.315145] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.315465] env[61999]: ERROR 
nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] result = hub.switch() [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self.greenlet.switch() [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] result = function(*args, **kwargs) [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return func(*args, **kwargs) [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise e [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] nwinfo = self.network_api.allocate_for_instance( [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.315465] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] created_port_ids = self._update_ports_for_instance( [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] with excutils.save_and_reraise_exception(): [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.force_reraise() [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise self.value [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] updated_port = self._update_port( [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.316780] 
env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] _ensure_no_port_binding_failure(port) [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.316780] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise exception.PortBindingFailed(port_id=port['id']) [ 529.317254] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 529.317254] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] [ 529.317254] env[61999]: INFO nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Terminating instance [ 529.469361] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043192, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.478905] env[61999]: DEBUG nova.network.neutron [-] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.819599] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquiring lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.819818] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquired lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.819997] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 529.928952] env[61999]: DEBUG oslo_concurrency.lockutils [None req-c187832f-648d-43a7-b7d9-4b6c9efc553a tempest-ImagesOneServerNegativeTestJSON-776015163 tempest-ImagesOneServerNegativeTestJSON-776015163-project-member] Acquiring lock "f5514c9d-0187-4daa-8db8-6ff7ad3fca8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.929219] env[61999]: DEBUG oslo_concurrency.lockutils [None req-c187832f-648d-43a7-b7d9-4b6c9efc553a tempest-ImagesOneServerNegativeTestJSON-776015163 
tempest-ImagesOneServerNegativeTestJSON-776015163-project-member] Lock "f5514c9d-0187-4daa-8db8-6ff7ad3fca8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.970766] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043192, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.919598} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.973917] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copied Virtual Disk [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk to [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 529.974182] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea/tmp-sparse.vmdk {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 529.976837] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97c5fd88-29a6-485b-985d-d85cfa8b16f4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.983093] env[61999]: INFO nova.compute.manager [-] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Took 1.04 seconds to deallocate network for instance. [ 529.983093] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 529.983093] env[61999]: value = "task-1043193" [ 529.983093] env[61999]: _type = "Task" [ 529.983093] env[61999]: } to complete. 
{{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.993378] env[61999]: DEBUG nova.compute.claims [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.993378] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.998961] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.039141] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fa0f25-9064-4a5d-af29-b2aa81c8f047 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.047500] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a28da75-182b-41f5-a77c-2d6d3f67d57c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.091812] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b09ad2-af69-4ed7-abba-06a2b9286ebd {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.100187] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7140a9-8d47-4063-8e19-272ecf20b6bc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.114622] env[61999]: DEBUG nova.compute.provider_tree [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.359971] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.425742] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.496837] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022296} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.496837] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 530.496837] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Moving file from [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3/9eed6d08-b529-4317-89cc-ae13b2d60cea to [datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea. {{(pid=61999) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 530.496837] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b2005188-f76e-487e-8a12-f5c9f9e5f74e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.505490] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 530.505490] env[61999]: value = "task-1043194" [ 530.505490] env[61999]: _type = "Task" [ 530.505490] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.515442] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043194, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.601837] env[61999]: DEBUG oslo_concurrency.lockutils [None req-0ec6147b-3cf8-4631-bc91-1aed32e1dceb tempest-VolumesAdminNegativeTest-1334958800 tempest-VolumesAdminNegativeTest-1334958800-project-member] Acquiring lock "50dd7faa-07d5-4f9b-89e2-da387d10a115" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.602211] env[61999]: DEBUG oslo_concurrency.lockutils [None req-0ec6147b-3cf8-4631-bc91-1aed32e1dceb tempest-VolumesAdminNegativeTest-1334958800 tempest-VolumesAdminNegativeTest-1334958800-project-member] Lock "50dd7faa-07d5-4f9b-89e2-da387d10a115" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.620896] env[61999]: DEBUG nova.scheduler.client.report [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 530.929114] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Releasing lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.929654] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 530.929858] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 530.930178] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee2c94d0-28ad-42c4-ae76-cb37b1d9f072 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.944206] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c64ac2-5415-4014-b5a2-735f3b3998d6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.968080] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e could not be found. [ 530.968337] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 530.968771] env[61999]: INFO nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 530.969061] env[61999]: DEBUG oslo.service.loopingcall [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.969367] env[61999]: DEBUG nova.compute.manager [-] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 530.969428] env[61999]: DEBUG nova.network.neutron [-] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 531.003707] env[61999]: DEBUG nova.network.neutron [-] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.020855] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043194, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02507} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.020855] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] File moved {{(pid=61999) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 531.021126] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Cleaning up location [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3 {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 531.021126] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore1] vmware_temp/6ebb8715-6dae-4f17-9d94-2987b87d75a3 {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 531.021354] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc1e3b87-38b9-429c-ac7c-165c2cffe3b2 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.027273] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 531.027273] env[61999]: value = "task-1043195" [ 531.027273] env[61999]: _type = "Task" [ 531.027273] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.035704] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.044579] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Received event network-changed-2b139f42-4096-4b41-9635-757e2d880205 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 531.044806] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Refreshing instance network info cache due to event network-changed-2b139f42-4096-4b41-9635-757e2d880205. 
{{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 531.045490] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Acquiring lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.045490] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Acquired lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.045490] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Refreshing network info cache for port 2b139f42-4096-4b41-9635-757e2d880205 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 531.123821] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.124363] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Start building networks asynchronously for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 531.127189] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.316s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.128988] env[61999]: INFO nova.compute.claims [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 531.508087] env[61999]: DEBUG nova.network.neutron [-] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.542604] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022627} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.543022] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 531.544029] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7f8f879-6180-4f12-8c0e-96c6d2f13c64 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.549758] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 531.549758] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]52f14baa-3366-1fc8-58a7-aaa50208b667" [ 531.549758] env[61999]: _type = "Task" [ 531.549758] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.562597] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]52f14baa-3366-1fc8-58a7-aaa50208b667, 'name': SearchDatastore_Task, 'duration_secs': 0.008398} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.565250] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.565250] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore1] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 531.565250] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad082305-bffe-4198-9d4f-7fa8240db680 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.573471] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 531.573471] env[61999]: value = "task-1043196" [ 531.573471] env[61999]: _type = "Task" [ 531.573471] env[61999]: } to complete. 
{{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.581765] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.603052] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.610704] env[61999]: DEBUG oslo_concurrency.lockutils [None req-2e544521-3da2-45f8-9303-35d79d3c3e96 tempest-FloatingIPsAssociationNegativeTestJSON-572681282 tempest-FloatingIPsAssociationNegativeTestJSON-572681282-project-member] Acquiring lock "65ea7f95-aeca-47ee-891e-f4388fc86ef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.610947] env[61999]: DEBUG oslo_concurrency.lockutils [None req-2e544521-3da2-45f8-9303-35d79d3c3e96 tempest-FloatingIPsAssociationNegativeTestJSON-572681282 tempest-FloatingIPsAssociationNegativeTestJSON-572681282-project-member] Lock "65ea7f95-aeca-47ee-891e-f4388fc86ef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.634833] env[61999]: DEBUG nova.compute.utils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 531.637824] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Allocating IP information in the background. 
{{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 531.638044] env[61999]: DEBUG nova.network.neutron [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 531.813898] env[61999]: DEBUG nova.policy [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1300c36760724105bacb20e5bddaaabb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13f3224f6dcc48c19fdef8a47403589b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 531.828769] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7c699679-1d63-4aa7-bd5d-6b4739afe5bf tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Acquiring lock "ecda336a-cf05-46cf-884e-9ac5d5ca8b1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.828769] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7c699679-1d63-4aa7-bd5d-6b4739afe5bf tempest-DeleteServersAdminTestJSON-181869864 tempest-DeleteServersAdminTestJSON-181869864-project-member] Lock "ecda336a-cf05-46cf-884e-9ac5d5ca8b1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.884646] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.011602] env[61999]: INFO nova.compute.manager [-] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Took 1.04 seconds to deallocate network for instance. 
[ 532.014701] env[61999]: DEBUG nova.compute.claims [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 532.014701] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.085747] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459887} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.086271] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore1] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 532.086271] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extending root virtual disk to 1048576 {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 532.086501] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34724c89-a5f7-4004-a5d4-b36bf91b2a91 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.092851] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 532.092851] env[61999]: value = "task-1043197" [ 532.092851] env[61999]: _type = "Task" [ 532.092851] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.111133] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.154937] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 532.390701] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Releasing lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.390701] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Received event network-vif-deleted-2b139f42-4096-4b41-9635-757e2d880205 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 532.390701] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Received event network-changed-cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 532.390873] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Refreshing instance network info cache due to event network-changed-cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 532.390921] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Acquiring lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.394274] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Acquired lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.394677] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Refreshing network info cache for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 532.493557] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817b950f-a9ef-4fc0-9e98-540df63c70b8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.502524] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db543460-5885-4963-873f-f13c20712898 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.539469] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5b53ed-cf40-4062-aec3-4d2dadb39b62 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.548837] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e52f1a-fdae-4471-9088-5c56525249cf {{(pid=61999) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.565099] env[61999]: DEBUG nova.compute.provider_tree [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.602958] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066787} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.604766] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extended root virtual disk {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 532.605428] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba0dad2-e96c-4ed1-8c09-2f72bf7fb2d4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.630938] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 532.631485] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d3ffaa8-6574-44ad-bfc9-beb20e3860d9 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.652306] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 532.652306] env[61999]: value = "task-1043198" [ 532.652306] env[61999]: _type = "Task" [ 532.652306] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.674036] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043198, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.948736] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 533.068509] env[61999]: DEBUG nova.scheduler.client.report [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 533.149030] env[61999]: DEBUG nova.network.neutron [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Successfully created port: 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.162244] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043198, 'name': ReconfigVM_Task, 'duration_secs': 0.323561} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.162728] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 533.163344] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d687d9e5-c7e8-4837-aae4-040f2312fe40 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.170506] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 533.170506] env[61999]: value = "task-1043199" [ 533.170506] env[61999]: _type = "Task" [ 533.170506] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.174664] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 533.182598] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043199, 'name': Rename_Task} progress is 5%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.204182] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 533.204438] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.204594] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 533.204771] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.204914] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 533.205147] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 533.205377] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 533.205537] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 533.205701] env[61999]: DEBUG nova.virt.hardware [None 
req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 533.206329] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 533.206329] env[61999]: DEBUG nova.virt.hardware [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 533.206944] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0b4d68-f640-4771-8c1c-a292d33f75db {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.215883] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff7a3d3-403f-406e-b041-3e64df22a57a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.235339] env[61999]: DEBUG nova.network.neutron [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.579187] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.579746] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 533.583091] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.162s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.588202] env[61999]: INFO nova.compute.claims [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.686868] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043199, 'name': Rename_Task, 'duration_secs': 0.130357} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.687655] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 533.689440] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b35d387-5559-41d9-b73e-5c6c4d44dd5d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.696220] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 533.696220] env[61999]: value = "task-1043200" [ 533.696220] env[61999]: _type = "Task" [ 533.696220] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.708094] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043200, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.738528] env[61999]: DEBUG oslo_concurrency.lockutils [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] Releasing lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.738829] env[61999]: DEBUG nova.compute.manager [req-a5d94061-e82d-4f14-8c4e-56bac19e778f req-a5e0609e-7984-4919-b457-1fb185e7b5c3 service nova] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Received event network-vif-deleted-cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 534.093378] env[61999]: DEBUG nova.compute.utils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.099694] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 534.101200] env[61999]: DEBUG nova.network.neutron [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.214431] env[61999]: DEBUG oslo_vmware.api [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043200, 'name': PowerOnVM_Task, 'duration_secs': 0.449519} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.214431] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 534.214431] env[61999]: DEBUG nova.compute.manager [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Checking state {{(pid=61999) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 534.215100] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2312da1-c5e3-4ff8-a14e-d697f2033744 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.413391] env[61999]: DEBUG nova.policy [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b38f7306bbf64d83860bc1c10ad725ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e5a44ef6efe4b04a78e78386f80f21a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 534.602098] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 534.732093] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.947628] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933d954f-912e-43d2-a710-e383cdaa563b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.959015] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18b6db-1a4b-47de-b4a3-cc0f9f3c753b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.996413] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823eddd9-b833-4d2d-83a0-a01025dea696 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.005525] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ca1e25-a48f-425e-8ecc-99a045d0ff9d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.026819] env[61999]: DEBUG nova.compute.provider_tree [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.528519] env[61999]: DEBUG nova.scheduler.client.report [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 535.548837] env[61999]: DEBUG nova.network.neutron [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Successfully created port: d916bee3-a21b-433c-a05c-bd648929118d {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 535.612010] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 535.637704] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 535.637841] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.638364] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 535.638364] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.638364] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 535.638592] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 535.638818] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 535.638895] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 535.639036] 
env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 535.639195] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 535.639358] env[61999]: DEBUG nova.virt.hardware [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 535.640279] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d5d062-ba02-4122-83b0-192555abd0a6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.650658] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fbe9ac-1fd5-4019-be14-e352c2a7bd61 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.039462] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.039462] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 536.041328] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.043s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.261136] env[61999]: INFO nova.compute.manager [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Rebuilding instance [ 536.324193] env[61999]: DEBUG nova.compute.manager [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Checking state {{(pid=61999) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 536.325410] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c53ddc5-c4b6-4f92-8c16-09f5c43fce6f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.543465] env[61999]: DEBUG nova.compute.utils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.545067] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Allocating IP information in the background. 
{{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 536.545316] env[61999]: DEBUG nova.network.neutron [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 536.822095] env[61999]: DEBUG nova.policy [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dc6a64f93b4437aabf3d98861ee8527', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '295764aa1a3d4e6b9b5170d313085c01', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 536.915896] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5affd766-1f93-4d67-a59d-a03c2e9b0dbb {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.924097] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28ec8a7-a496-4a3a-be98-1c2793e3232d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.964923] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db0bcb8-1e9c-44c4-9b28-642715add343 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.974180] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c499bcda-ab2d-44e2-a5ab-59994e99e23b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.990996] env[61999]: DEBUG nova.compute.provider_tree [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.053915] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 537.347702] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 537.348156] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b94a6c49-78ad-49c5-b4c6-3d449c470060 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.358472] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 537.358472] env[61999]: value = "task-1043201" [ 537.358472] env[61999]: _type = "Task" [ 537.358472] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.374350] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043201, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.494945] env[61999]: DEBUG nova.scheduler.client.report [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 537.818908] env[61999]: DEBUG nova.compute.manager [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Received event network-changed-8ec72c4d-87e3-44f6-b852-f9087e8bdb8d {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 537.818908] env[61999]: DEBUG nova.compute.manager [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Refreshing instance network info cache due to event network-changed-8ec72c4d-87e3-44f6-b852-f9087e8bdb8d. 
{{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 537.818908] env[61999]: DEBUG oslo_concurrency.lockutils [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] Acquiring lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.818908] env[61999]: DEBUG oslo_concurrency.lockutils [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] Acquired lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.818908] env[61999]: DEBUG nova.network.neutron [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Refreshing network info cache for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 537.868848] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043201, 'name': PowerOffVM_Task, 'duration_secs': 0.132542} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.869455] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 537.869864] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.870862] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec265bc1-9f3a-4412-9c7b-17f99d3fb851 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.879694] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistering the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 537.879694] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88da866a-ab5d-4eed-aa06-0b7160026a4b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.906152] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistered the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 537.906152] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 
tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleting contents of the VM from datastore datastore1 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 537.906385] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Deleting the datastore file [datastore1] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5 {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 537.906683] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20368ad2-ccf9-41c3-a2f8-a79b69e2bc92 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.915466] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 537.915466] env[61999]: value = "task-1043203" [ 537.915466] env[61999]: _type = "Task" [ 537.915466] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.927591] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.000467] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.004413] env[61999]: ERROR nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. 
[ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Traceback (most recent call last): [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.driver.spawn(context, instance, image_meta, [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self._vmops.spawn(context, instance, image_meta, injected_files, [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] vm_ref = self.build_virtual_machine(instance, [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] vif_infos = vmwarevif.get_vif_info(self._session, [ 538.004413] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] for vif in network_info: [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self._sync_wrapper(fn, *args, **kwargs) [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.wait() [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self[:] = self._gt.wait() [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self._exit_event.wait() [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] result = hub.switch() [ 538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
538.004713] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return self.greenlet.switch() [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] result = function(*args, **kwargs) [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] return func(*args, **kwargs) [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise e [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] nwinfo = self.network_api.allocate_for_instance( [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] created_port_ids = self._update_ports_for_instance( [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] with excutils.save_and_reraise_exception(): [ 538.005047] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] self.force_reraise() [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise self.value [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] updated_port = self._update_port( [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] _ensure_no_port_binding_failure(port) [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] raise exception.PortBindingFailed(port_id=port['id']) [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] nova.exception.PortBindingFailed: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. [ 538.005368] env[61999]: ERROR nova.compute.manager [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] [ 538.005652] env[61999]: DEBUG nova.compute.utils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 538.006864] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.652s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.012813] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Build of instance 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181 was re-scheduled: Binding failed for port f5a48c9e-a997-40fc-89d8-27d5dc7820e7, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 538.013475] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 538.013758] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquiring lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.013942] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Acquired lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.014149] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 538.020079] env[61999]: ERROR nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d, please check neutron logs for more information. 
[ 538.020079] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.020079] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.020079] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.020079] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.020079] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.020079] env[61999]: ERROR nova.compute.manager raise self.value [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.020079] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 538.020079] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.020079] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 538.021094] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.021094] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 538.021094] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d, please check neutron logs for more information. 
[ 538.021094] env[61999]: ERROR nova.compute.manager [ 538.021094] env[61999]: Traceback (most recent call last): [ 538.021094] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 538.021094] env[61999]: listener.cb(fileno) [ 538.021094] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.021094] env[61999]: result = function(*args, **kwargs) [ 538.021094] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.021094] env[61999]: return func(*args, **kwargs) [ 538.021094] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 538.021094] env[61999]: raise e [ 538.021094] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.021094] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 538.021094] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.021094] env[61999]: created_port_ids = self._update_ports_for_instance( [ 538.021094] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.021094] env[61999]: with excutils.save_and_reraise_exception(): [ 538.021094] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.021094] env[61999]: self.force_reraise() [ 538.021094] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.021094] env[61999]: raise self.value [ 538.021094] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.021094] env[61999]: updated_port = self._update_port( [ 538.021094] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.021094] env[61999]: _ensure_no_port_binding_failure(port) [ 538.021094] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.021094] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 538.021827] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d, please check neutron logs for more information. [ 538.021827] env[61999]: Removing descriptor: 15 [ 538.021827] env[61999]: ERROR nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d, please check neutron logs for more information. 
[ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Traceback (most recent call last): [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] yield resources [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] self.driver.spawn(context, instance, image_meta, [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 538.021827] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] vm_ref = self.build_virtual_machine(instance, [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] for vif in network_info: [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] return self._sync_wrapper(fn, *args, **kwargs) [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] self.wait() [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] self[:] = self._gt.wait() [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] return self._exit_event.wait() [ 538.022160] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 538.022498] env[61999]: ERROR 
nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] result = hub.switch() [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] return self.greenlet.switch() [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] result = function(*args, **kwargs) [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] return func(*args, **kwargs) [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] raise e [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] nwinfo = self.network_api.allocate_for_instance( [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.022498] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] created_port_ids = self._update_ports_for_instance( [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] with excutils.save_and_reraise_exception(): [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] self.force_reraise() [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] raise self.value [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] updated_port = self._update_port( [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.022812] 
env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] _ensure_no_port_binding_failure(port) [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.022812] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] raise exception.PortBindingFailed(port_id=port['id']) [ 538.023125] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] nova.exception.PortBindingFailed: Binding failed for port 8ec72c4d-87e3-44f6-b852-f9087e8bdb8d, please check neutron logs for more information. [ 538.023125] env[61999]: ERROR nova.compute.manager [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] [ 538.023125] env[61999]: INFO nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Terminating instance [ 538.069601] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 538.106884] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 538.107261] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.107458] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 538.107720] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.107862] env[61999]: 
DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 538.108077] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 538.108420] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 538.108631] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 538.108848] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 538.109143] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 538.109481] env[61999]: DEBUG nova.virt.hardware [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 538.110339] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0593239b-177e-4e32-b6c2-15587ed118e6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.120521] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4f246b-9686-4e19-9262-a773eb89669f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.415461] env[61999]: DEBUG nova.network.neutron [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.429721] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088709} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.429971] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 538.433826] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleted contents of the VM from datastore datastore1 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 538.434501] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 538.528191] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Acquiring lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.607359] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.740394] env[61999]: DEBUG nova.network.neutron [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.777394] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c07bef8-606e-464d-b179-6ca2cef11dbf {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.785845] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c5a310-e6ee-475c-a13b-732cfa3598fa {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.827050] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce77f189-9cf2-4731-bcbb-d4ffb35f30fe {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.835519] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0da5681-f165-4779-afb2-51c1ca0fc7bc {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.854027] env[61999]: DEBUG nova.compute.provider_tree [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.898523] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.973086] env[61999]: DEBUG nova.network.neutron [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Successfully created port: 7557fb5c-a2d3-4172-82a6-20fab428f77e {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.243381] env[61999]: DEBUG oslo_concurrency.lockutils [req-c106dc75-ded9-4dcf-87e1-2690df0c30cb req-dd128cf4-50f8-44bc-8738-a7b53483259c service nova] Releasing lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.244665] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Acquired lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.244665] env[61999]: DEBUG nova.network.neutron [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 
tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 539.357456] env[61999]: DEBUG nova.scheduler.client.report [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 539.401774] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Releasing lock "refresh_cache-2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.401867] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 539.402121] env[61999]: DEBUG nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 539.402186] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.446198] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.481324] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 539.481324] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.481324] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 539.481324] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.481514] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 539.481514] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 539.481514] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 539.481514] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 539.481639] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 
tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 539.481674] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 539.485187] env[61999]: DEBUG nova.virt.hardware [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 539.485187] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e6f63f-2df3-4c10-a54e-e4595f3c635c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.495664] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6759593-ba10-43d4-8323-a875a2ff73e4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.519528] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance VIF info [] {{(pid=61999) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 539.524400] env[61999]: DEBUG oslo.service.loopingcall [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.524868] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Creating VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 539.525383] env[61999]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12d6cfb4-ee70-4af0-bff2-5248bc2f1661 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.547124] env[61999]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 539.547124] env[61999]: value = "task-1043204" [ 539.547124] env[61999]: _type = "Task" [ 539.547124] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.558436] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043204, 'name': CreateVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.802476] env[61999]: DEBUG nova.network.neutron [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.864433] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.864821] env[61999]: ERROR nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Traceback (most recent call last): [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.driver.spawn(context, instance, image_meta, [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] vm_ref = self.build_virtual_machine(instance, [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.864821] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] for vif in network_info: [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self._sync_wrapper(fn, *args, **kwargs) [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.wait() [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.865358] env[61999]: ERROR nova.compute.manager 
[instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self[:] = self._gt.wait() [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self._exit_event.wait() [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] result = hub.switch() [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 539.865358] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return self.greenlet.switch() [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] result = function(*args, **kwargs) [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] return func(*args, **kwargs) [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise e [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] nwinfo = self.network_api.allocate_for_instance( [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] created_port_ids = self._update_ports_for_instance( [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] with excutils.save_and_reraise_exception(): [ 539.865805] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] self.force_reraise() [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise self.value [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] updated_port = self._update_port( [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] _ensure_no_port_binding_failure(port) [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] raise exception.PortBindingFailed(port_id=port['id']) [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] nova.exception.PortBindingFailed: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. [ 539.866182] env[61999]: ERROR nova.compute.manager [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] [ 539.866735] env[61999]: DEBUG nova.compute.utils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 539.867169] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.880s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.871070] env[61999]: INFO nova.compute.claims [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.875688] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Build of instance 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4 was re-scheduled: Binding failed for port f242c8f1-bf66-4285-afd0-eac259344df8, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 539.875688] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 539.875688] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.875688] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquired lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.875913] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 539.957182] env[61999]: DEBUG nova.network.neutron [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.021021] env[61999]: DEBUG nova.network.neutron [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.064027] env[61999]: DEBUG oslo_vmware.api [-] Task: {'id': task-1043204, 'name': CreateVM_Task, 'duration_secs': 0.305836} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.064027] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Created VM on the ESX host {{(pid=61999) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 540.064027] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.064027] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.064896] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 540.064896] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ecf9e8-a8dd-4d45-85a5-761f15976a63 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.070927] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 540.070927] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]5262330e-8070-c790-0daa-0ea93d1900f1" [ 540.070927] env[61999]: _type = "Task" [ 540.070927] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.080563] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]5262330e-8070-c790-0daa-0ea93d1900f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.086650] env[61999]: DEBUG oslo_concurrency.lockutils [None req-a4ca8b03-f7a2-4a45-9570-1e2d2e89747f tempest-ServersTestMultiNic-2055868010 tempest-ServersTestMultiNic-2055868010-project-member] Acquiring lock "82d591b3-d835-4f03-b3f8-316313e3f66b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.087774] env[61999]: DEBUG oslo_concurrency.lockutils [None req-a4ca8b03-f7a2-4a45-9570-1e2d2e89747f tempest-ServersTestMultiNic-2055868010 tempest-ServersTestMultiNic-2055868010-project-member] Lock "82d591b3-d835-4f03-b3f8-316313e3f66b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.420473] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.458848] env[61999]: INFO nova.compute.manager [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] [instance: 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181] Took 1.06 seconds to deallocate network for instance. [ 540.524034] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Releasing lock "refresh_cache-fdb42889-b8ef-4a8a-a7f4-17397f29abd6" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.525129] env[61999]: DEBUG nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 540.525129] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 540.525129] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fad82730-a7f3-4281-a417-64d4f7edf5c2 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.534882] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75f1aa3-e703-49d8-8dde-59a237045c3d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.564153] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fdb42889-b8ef-4a8a-a7f4-17397f29abd6 could not be found. [ 540.564153] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 540.564153] env[61999]: INFO nova.compute.manager [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 540.564259] env[61999]: DEBUG oslo.service.loopingcall [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 540.564692] env[61999]: DEBUG nova.compute.manager [-] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 540.564692] env[61999]: DEBUG nova.network.neutron [-] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 540.583746] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]5262330e-8070-c790-0daa-0ea93d1900f1, 'name': SearchDatastore_Task, 'duration_secs': 0.030832} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.584088] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.584328] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Processing image 9eed6d08-b529-4317-89cc-ae13b2d60cea {{(pid=61999) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 540.584556] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.584698] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.584874] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 540.585154] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba95a03f-e404-41da-906d-3af8d6799586 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.594078] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61999) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 540.594274] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61999) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 540.595196] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aec9eca3-b921-44d4-b955-a94f923f08a2 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.600165] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 540.600165] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]527ae3e6-d2ba-81cc-957b-e446790d818d" [ 540.600165] env[61999]: _type = "Task" [ 540.600165] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.609985] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]527ae3e6-d2ba-81cc-957b-e446790d818d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.615384] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.624749] env[61999]: DEBUG nova.network.neutron [-] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.778937] env[61999]: ERROR nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d916bee3-a21b-433c-a05c-bd648929118d, please check neutron logs for more information. 
[ 540.778937] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 540.778937] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.778937] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.778937] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.778937] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.778937] env[61999]: ERROR nova.compute.manager raise self.value [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.778937] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 540.778937] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.778937] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 540.779531] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.779531] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 540.779531] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d916bee3-a21b-433c-a05c-bd648929118d, please check neutron logs for more information. 
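Annotation: every PortBindingFailed traceback in this log funnels through the same check in nova/network/neutron.py — after each port update, _ensure_no_port_binding_failure() (line 585 → line 294 in the frames above) inspects the port dict returned by Neutron and raises PortBindingFailed, which is what surfaces as "Binding failed for port ...". The sketch below is illustrative only, not code copied from Nova or from this log; in particular, the field it inspects (binding:vif_type == 'binding_failed') is an assumption about how Neutron reports a failed binding.

```python
# Illustrative sketch only -- not Nova's actual implementation.
# Assumption: Neutron marks an unbindable port by setting its
# binding:vif_type to "binding_failed".

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # 'port' is the dict describing a single port as returned by Neutron.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])
```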
[ 540.779531] env[61999]: ERROR nova.compute.manager [ 540.779531] env[61999]: Traceback (most recent call last): [ 540.779531] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 540.779531] env[61999]: listener.cb(fileno) [ 540.779753] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.779753] env[61999]: result = function(*args, **kwargs) [ 540.779753] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.779753] env[61999]: return func(*args, **kwargs) [ 540.779753] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 540.779753] env[61999]: raise e [ 540.779753] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 540.779753] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 540.779753] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.779753] env[61999]: created_port_ids = self._update_ports_for_instance( [ 540.779753] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.779753] env[61999]: with excutils.save_and_reraise_exception(): [ 540.779753] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.779753] env[61999]: self.force_reraise() [ 540.779753] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.779753] env[61999]: raise self.value [ 540.779753] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.779753] env[61999]: updated_port = self._update_port( [ 540.779753] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.779753] env[61999]: _ensure_no_port_binding_failure(port) [ 540.779753] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.779753] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 540.779753] env[61999]: nova.exception.PortBindingFailed: Binding failed for port d916bee3-a21b-433c-a05c-bd648929118d, please check neutron logs for more information. [ 540.779753] env[61999]: Removing descriptor: 16 [ 540.780758] env[61999]: ERROR nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d916bee3-a21b-433c-a05c-bd648929118d, please check neutron logs for more information. 
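Annotation: the __exit__, force_reraise(), and "raise self.value" frames in these tracebacks come from oslo_utils.excutils.save_and_reraise_exception(), which Nova uses so it can run cleanup after a failed port update and still propagate the original exception. The sketch below shows that pattern in isolation; the client object and the cleanup step are hypothetical placeholders, not the code at nova/network/neutron.py:1365.

```python
# Minimal, self-contained sketch of the cleanup-then-reraise pattern.
# 'client' and _delete_ports() are hypothetical stand-ins for illustration.
from oslo_utils import excutils

def _delete_ports(client, port_ids):
    # Hypothetical cleanup: best-effort removal of ports created earlier.
    for port_id in port_ids:
        client.delete_port(port_id)

def update_ports_for_instance(client, created_port_ids, requested_ports):
    for port in requested_ports:
        try:
            client.update_port(port)  # raises if the binding failed
        except Exception:
            # save_and_reraise_exception() captures the active exception,
            # runs this block as cleanup, then re-raises the original error
            # on exit -- which is why force_reraise()/raise self.value
            # appear in the middle of each traceback above.
            with excutils.save_and_reraise_exception():
                _delete_ports(client, created_port_ids)
```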
[ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Traceback (most recent call last): [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] yield resources [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] self.driver.spawn(context, instance, image_meta, [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] vm_ref = self.build_virtual_machine(instance, [ 540.780758] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] for vif in network_info: [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] return self._sync_wrapper(fn, *args, **kwargs) [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] self.wait() [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] self[:] = self._gt.wait() [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] return self._exit_event.wait() [ 540.781260] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 540.781260] env[61999]: ERROR 
nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] result = hub.switch() [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] return self.greenlet.switch() [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] result = function(*args, **kwargs) [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] return func(*args, **kwargs) [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] raise e [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] nwinfo = self.network_api.allocate_for_instance( [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] created_port_ids = self._update_ports_for_instance( [ 540.781711] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] with excutils.save_and_reraise_exception(): [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] self.force_reraise() [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] raise self.value [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] updated_port = self._update_port( [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.782130] 
env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] _ensure_no_port_binding_failure(port) [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] raise exception.PortBindingFailed(port_id=port['id']) [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] nova.exception.PortBindingFailed: Binding failed for port d916bee3-a21b-433c-a05c-bd648929118d, please check neutron logs for more information. [ 540.782130] env[61999]: ERROR nova.compute.manager [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] [ 540.782524] env[61999]: INFO nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Terminating instance [ 540.970848] env[61999]: DEBUG nova.compute.manager [req-6fe7eb37-0c7a-4a1f-a723-506d44e2051a req-8a0b6432-3f53-4c7c-9c95-6dccd81a1f9f service nova] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Received event network-vif-deleted-8ec72c4d-87e3-44f6-b852-f9087e8bdb8d {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 541.115739] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]527ae3e6-d2ba-81cc-957b-e446790d818d, 'name': SearchDatastore_Task, 'duration_secs': 0.011156} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.120483] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Releasing lock "refresh_cache-530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.120809] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 541.121022] env[61999]: DEBUG nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 541.122378] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 541.123089] env[61999]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7df25e2f-2ee5-4f0a-9820-998b53bc5e85 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.127082] env[61999]: DEBUG nova.network.neutron [-] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.134566] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 541.134566] env[61999]: value = "session[526f0f75-97df-5b65-23f4-4bee22d46a56]5284ed4e-59cc-400c-e34a-de36448c1c72" [ 541.134566] env[61999]: _type = "Task" [ 541.134566] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.146126] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': session[526f0f75-97df-5b65-23f4-4bee22d46a56]5284ed4e-59cc-400c-e34a-de36448c1c72, 'name': SearchDatastore_Task, 'duration_secs': 0.008354} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.149979] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.150549] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 541.151237] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c7e1ddc-ecc8-454b-b1c8-43e09fa10954 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.161382] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 541.161382] env[61999]: value = "task-1043205" [ 541.161382] env[61999]: _type = "Task" [ 541.161382] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.170188] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.170412] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.241991] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ad0b6fae-0142-4dd5-899b-f861cb75340b tempest-ServerGroupTestJSON-480021389 tempest-ServerGroupTestJSON-480021389-project-member] Acquiring lock "2538a76e-7688-4f99-8133-19c9fde8cf50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.242447] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ad0b6fae-0142-4dd5-899b-f861cb75340b tempest-ServerGroupTestJSON-480021389 tempest-ServerGroupTestJSON-480021389-project-member] Lock "2538a76e-7688-4f99-8133-19c9fde8cf50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.246798] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d770ea5c-c681-425d-962a-854f986a3c21 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.256087] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8203f26f-1a9d-4395-807f-4d4c02ade030 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.295258] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Acquiring lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.295987] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Acquired lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.295987] env[61999]: DEBUG nova.network.neutron [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.297764] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0fbc5d-6210-4889-9d43-9bfe594f2ee1 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.306864] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b973d31b-0ca8-4e20-850f-ba44875c607a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.324661] env[61999]: DEBUG nova.compute.provider_tree [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Inventory has not changed in ProviderTree for provider: 
dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.512109] env[61999]: INFO nova.scheduler.client.report [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Deleted allocations for instance 2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181 [ 541.633311] env[61999]: INFO nova.compute.manager [-] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Took 1.07 seconds to deallocate network for instance. [ 541.637481] env[61999]: DEBUG nova.compute.claims [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] [instance: fdb42889-b8ef-4a8a-a7f4-17397f29abd6] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 541.637687] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8389636f-4ea9-423a-a882-3ed30cff4c0f tempest-ServerDiagnosticsTest-459972063 tempest-ServerDiagnosticsTest-459972063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.671411] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043205, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471162} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.671411] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/9eed6d08-b529-4317-89cc-ae13b2d60cea/9eed6d08-b529-4317-89cc-ae13b2d60cea.vmdk to [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk {{(pid=61999) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 541.671672] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extending root virtual disk to 1048576 {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 541.671891] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c94ae082-2b54-40bd-95ff-71ff0945f6c6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.674509] env[61999]: DEBUG nova.network.neutron [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.682883] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 541.682883] env[61999]: value = "task-1043206" [ 541.682883] env[61999]: _type 
= "Task" [ 541.682883] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.690518] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043206, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.828035] env[61999]: DEBUG nova.scheduler.client.report [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 541.862038] env[61999]: DEBUG nova.network.neutron [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.029628] env[61999]: DEBUG oslo_concurrency.lockutils [None req-87828f42-23a1-4dd1-a08e-93dfaa7b2e5e tempest-ServerExternalEventsTest-1945139463 tempest-ServerExternalEventsTest-1945139463-project-member] Lock "2f6aa5c2-e9d0-4cf4-a9d5-85bc2a6ac181" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.535s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.178246] env[61999]: INFO nova.compute.manager [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] [instance: 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4] Took 1.06 seconds to deallocate network for instance. [ 542.201699] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063244} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.203178] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Extended root virtual disk {{(pid=61999) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 542.204211] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664c3f01-e476-4637-b1ac-bb57bb657e71 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.237106] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 542.237647] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f438b34-8c9d-49d9-adca-d7dbfa35da7d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.260201] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 542.260201] env[61999]: value = "task-1043207" [ 542.260201] env[61999]: _type = "Task" [ 542.260201] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.269220] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043207, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.333846] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.335196] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 542.338218] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.722s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.450753] env[61999]: DEBUG nova.network.neutron [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.534103] env[61999]: DEBUG nova.compute.manager [None req-787d7f22-ed8c-4c06-9773-261aed97575a tempest-ServersTestManualDisk-2007130817 tempest-ServersTestManualDisk-2007130817-project-member] [instance: 8e68085f-4bbd-480c-a925-e8714326ee9f] Starting instance... {{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 542.778682] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043207, 'name': ReconfigVM_Task, 'duration_secs': 0.272141} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.779022] env[61999]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5/1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5.vmdk or device None with type sparse {{(pid=61999) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 542.779603] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae411d8f-5c6d-4b06-8b83-1b266e97ef6e {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.787043] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 542.787043] env[61999]: value = "task-1043208" [ 542.787043] env[61999]: _type = "Task" [ 542.787043] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.797520] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043208, 'name': Rename_Task} progress is 5%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.844660] env[61999]: DEBUG nova.compute.utils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.848101] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 542.848101] env[61999]: DEBUG nova.network.neutron [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 542.952133] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Releasing lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.953024] env[61999]: DEBUG nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 542.953024] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 542.953024] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-edc89185-c0c9-40b8-b2b0-489d8f4c6807 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.963855] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a100c8a-8546-452a-9340-77b596ce5538 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.996796] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c could not be found. 
[ 542.997104] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 542.997292] env[61999]: INFO nova.compute.manager [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 542.997546] env[61999]: DEBUG oslo.service.loopingcall [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.000640] env[61999]: DEBUG nova.compute.manager [-] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 543.000918] env[61999]: DEBUG nova.network.neutron [-] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 543.031598] env[61999]: DEBUG nova.policy [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32f1f3a86eb64905a046ac2b11faea8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a23f1eb4591a46418d8092fea8c0cc86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 543.054586] env[61999]: DEBUG nova.network.neutron [-] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.059748] env[61999]: DEBUG oslo_concurrency.lockutils [None req-787d7f22-ed8c-4c06-9773-261aed97575a tempest-ServersTestManualDisk-2007130817 tempest-ServersTestManualDisk-2007130817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.158694] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf0b5a4-8e94-4b36-9d97-acb18ad88767 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.166282] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e817c76a-7960-4b76-8102-445e21d0ca7c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.203528] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596c50a4-9848-4de8-b37f-da37768959c7 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.211126] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7dea43-8b1b-461a-843f-762db8180af1 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.225146] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 543.227179] env[61999]: INFO nova.scheduler.client.report [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Deleted allocations for instance 530dbdef-a78a-47ad-8cc5-5f0ffbea65b4 [ 543.264237] env[61999]: ERROR nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7557fb5c-a2d3-4172-82a6-20fab428f77e, please check neutron logs for more information. 
[ 543.264237] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.264237] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.264237] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.264237] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.264237] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.264237] env[61999]: ERROR nova.compute.manager raise self.value [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.264237] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.264237] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.264237] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.265022] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.265022] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.265022] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7557fb5c-a2d3-4172-82a6-20fab428f77e, please check neutron logs for more information. 
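The PortBindingFailed tracebacks above all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure(). A minimal sketch of that check, consistent with the frames shown; the exception class here is a local stand-in, not the real nova.exception import.

VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    # Local stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron reports binding:vif_type = 'binding_failed' when no mechanism
    # driver could bind the port on the target host; Nova translates that
    # into PortBindingFailed, which is what aborts the spawn above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': '7557fb5c-a2d3-4172-82a6-20fab428f77e',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)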
[ 543.265022] env[61999]: ERROR nova.compute.manager [ 543.265022] env[61999]: Traceback (most recent call last): [ 543.265022] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.265022] env[61999]: listener.cb(fileno) [ 543.265022] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.265022] env[61999]: result = function(*args, **kwargs) [ 543.265022] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 543.265022] env[61999]: return func(*args, **kwargs) [ 543.265022] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 543.265022] env[61999]: raise e [ 543.265022] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.265022] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 543.265022] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.265022] env[61999]: created_port_ids = self._update_ports_for_instance( [ 543.265022] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.265022] env[61999]: with excutils.save_and_reraise_exception(): [ 543.265022] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.265022] env[61999]: self.force_reraise() [ 543.265022] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.265022] env[61999]: raise self.value [ 543.265022] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.265022] env[61999]: updated_port = self._update_port( [ 543.265022] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.265022] env[61999]: _ensure_no_port_binding_failure(port) [ 543.265022] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.265022] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.266215] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 7557fb5c-a2d3-4172-82a6-20fab428f77e, please check neutron logs for more information. [ 543.266215] env[61999]: Removing descriptor: 18 [ 543.266215] env[61999]: ERROR nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7557fb5c-a2d3-4172-82a6-20fab428f77e, please check neutron logs for more information. 
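The tracebacks also pass through oslo.utils' save_and_reraise_exception() in _update_ports_for_instance, which lets cleanup run while preserving the original exception. A small sketch of that pattern follows; the cleanup hook is hypothetical.

from oslo_utils import excutils

def update_port_or_cleanup(update_port, cleanup, port_id):
    # If the port update fails, run cleanup inside save_and_reraise_exception()
    # so the original exception (here, a stand-in for PortBindingFailed) is
    # re-raised once the with-block exits.
    try:
        return update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            cleanup(port_id)  # hypothetical rollback hook

def _failing_update(port_id):
    raise RuntimeError('binding failed for %s' % port_id)

try:
    update_port_or_cleanup(_failing_update, lambda pid: print('cleanup', pid),
                           '7557fb5c-a2d3-4172-82a6-20fab428f77e')
except RuntimeError as exc:
    print('re-raised:', exc)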
[ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Traceback (most recent call last): [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] yield resources [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] self.driver.spawn(context, instance, image_meta, [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.266215] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] vm_ref = self.build_virtual_machine(instance, [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] for vif in network_info: [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] return self._sync_wrapper(fn, *args, **kwargs) [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] self.wait() [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] self[:] = self._gt.wait() [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] return self._exit_event.wait() [ 543.266974] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.267538] env[61999]: ERROR 
nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] result = hub.switch() [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] return self.greenlet.switch() [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] result = function(*args, **kwargs) [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] return func(*args, **kwargs) [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] raise e [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] nwinfo = self.network_api.allocate_for_instance( [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.267538] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] created_port_ids = self._update_ports_for_instance( [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] with excutils.save_and_reraise_exception(): [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] self.force_reraise() [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] raise self.value [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] updated_port = self._update_port( [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.268505] 
env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] _ensure_no_port_binding_failure(port) [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.268505] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] raise exception.PortBindingFailed(port_id=port['id']) [ 543.268834] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] nova.exception.PortBindingFailed: Binding failed for port 7557fb5c-a2d3-4172-82a6-20fab428f77e, please check neutron logs for more information. [ 543.268834] env[61999]: ERROR nova.compute.manager [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] [ 543.268834] env[61999]: INFO nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Terminating instance [ 543.302755] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043208, 'name': Rename_Task, 'duration_secs': 0.129016} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.303060] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 543.303307] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aeb94ba5-29f3-467c-ae68-e62f816b45ce {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.309934] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Waiting for the task: (returnval){ [ 543.309934] env[61999]: value = "task-1043209" [ 543.309934] env[61999]: _type = "Task" [ 543.309934] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.318077] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.350036] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Start building block device mappings for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 543.559447] env[61999]: DEBUG nova.network.neutron [-] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.744354] env[61999]: DEBUG oslo_concurrency.lockutils [None req-ff4ff885-1007-4284-bc13-6c0a50781ea3 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "530dbdef-a78a-47ad-8cc5-5f0ffbea65b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.629s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.767441] env[61999]: ERROR nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [req-9efd5182-8aa7-4908-8877-c354bdd51d48] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dee5b0a7-9732-42d5-93c0-6b719a790f37. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9efd5182-8aa7-4908-8877-c354bdd51d48"}]}: nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
[ 543.780259] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Acquiring lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.780738] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Acquired lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.782209] env[61999]: DEBUG nova.network.neutron [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 543.789206] env[61999]: DEBUG nova.compute.manager [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Received event network-changed-d916bee3-a21b-433c-a05c-bd648929118d {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 543.791828] env[61999]: DEBUG nova.compute.manager [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Refreshing instance network info cache due to event network-changed-d916bee3-a21b-433c-a05c-bd648929118d. 
{{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 543.792216] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Acquiring lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.792216] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Acquired lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.792353] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Refreshing network info cache for port d916bee3-a21b-433c-a05c-bd648929118d {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 543.800932] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing inventories for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 543.818310] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating ProviderTree inventory for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 543.818615] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 543.827996] env[61999]: DEBUG oslo_vmware.api [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Task: {'id': task-1043209, 'name': PowerOnVM_Task, 'duration_secs': 0.447478} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.828278] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered on the VM {{(pid=61999) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 543.828750] env[61999]: DEBUG nova.compute.manager [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Checking state {{(pid=61999) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 543.829580] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0946b39-ebcf-4bba-b720-ed397af87915 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.849016] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing aggregate associations for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37, aggregates: None {{(pid=61999) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 543.893015] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing trait associations for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61999) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 544.068243] env[61999]: INFO nova.compute.manager [-] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Took 1.07 seconds to deallocate network for instance. [ 544.070788] env[61999]: DEBUG nova.compute.claims [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 544.070788] env[61999]: DEBUG oslo_concurrency.lockutils [None req-937d8505-0362-4432-9c34-b6d1ef8b3434 tempest-AttachInterfacesV270Test-1044635886 tempest-AttachInterfacesV270Test-1044635886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.239651] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47d449b-1db7-4b74-8668-a0bcf9471792 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.249718] env[61999]: DEBUG nova.compute.manager [None req-c187832f-648d-43a7-b7d9-4b6c9efc553a tempest-ImagesOneServerNegativeTestJSON-776015163 tempest-ImagesOneServerNegativeTestJSON-776015163-project-member] [instance: f5514c9d-0187-4daa-8db8-6ff7ad3fca8b] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 544.252537] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18adef5a-7424-4cae-b223-ee329b48d556 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.293746] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb663c1-b14a-4b7d-b727-acc1d3f6601f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.307481] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5146affa-cd10-4d89-926f-435f042e4747 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.322120] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.346728] env[61999]: DEBUG oslo_concurrency.lockutils [None req-7e2cbe3e-2a93-41d6-9046-3675f605f621 tempest-ServersAdmin275Test-1663958930 tempest-ServersAdmin275Test-1663958930-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.367920] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Start spawning the instance on the hypervisor. {{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 544.374219] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.376469] env[61999]: DEBUG nova.network.neutron [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.410843] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:08:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1206001219',id=25,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1991487714',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 544.411126] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.411359] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 544.411456] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.411603] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 544.411760] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 544.411950] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 544.412190] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 
tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 544.412383] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 544.412548] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 544.412714] env[61999]: DEBUG nova.virt.hardware [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 544.413907] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707ae9c9-ae72-42b4-bf67-454dd360b804 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.426055] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375f8274-f595-48eb-8997-f6f3362c9f81 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.793210] env[61999]: DEBUG oslo_concurrency.lockutils [None req-c187832f-648d-43a7-b7d9-4b6c9efc553a tempest-ImagesOneServerNegativeTestJSON-776015163 tempest-ImagesOneServerNegativeTestJSON-776015163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.856834] env[61999]: ERROR nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [req-c0de767d-67de-41a6-9cb7-6cc58fd22a50] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dee5b0a7-9732-42d5-93c0-6b719a790f37. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c0de767d-67de-41a6-9cb7-6cc58fd22a50"}]}: nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
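The 409 "placement.concurrent_update" above is the resource-provider generation check at work: every inventory PUT must carry the generation the client last saw, and a stale generation is rejected so the client re-reads the provider and retries, which is exactly the refresh/update cycle the report client runs through in the surrounding records. A minimal sketch of that retry loop against the Placement REST API follows; it assumes a pre-authenticated requests.Session and the /resource_providers/{uuid}/inventories endpoint implied by these records, and the helper name update_inventory_with_retry is illustrative rather than Nova's own code.

# Illustrative sketch only: retry an inventory PUT when Placement reports a
# resource-provider generation conflict (HTTP 409, placement.concurrent_update).
# `session` is assumed to be a requests.Session that already carries auth
# headers; the endpoint and payload shape follow the records above, not Nova.
import time
import requests


def update_inventory_with_retry(session: requests.Session,
                                placement_url: str,
                                rp_uuid: str,
                                inventories: dict,
                                max_attempts: int = 3) -> dict:
    url = f"{placement_url}/resource_providers/{rp_uuid}/inventories"
    for attempt in range(1, max_attempts + 1):
        # Re-read the provider's current generation before every attempt.
        current = session.get(url)
        current.raise_for_status()
        generation = current.json()["resource_provider_generation"]

        resp = session.put(url, json={
            "resource_provider_generation": generation,
            "inventories": inventories,
        })
        if resp.status_code == 409:
            # Another writer bumped the generation between our GET and PUT;
            # back off briefly and retry with a fresh generation.
            time.sleep(0.1 * attempt)
            continue
        resp.raise_for_status()
        return resp.json()
    raise RuntimeError(
        f"inventory update for {rp_uuid} kept conflicting after "
        f"{max_attempts} attempts")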
[ 544.880622] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing inventories for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 544.894628] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.901302] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating ProviderTree inventory for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 544.901302] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.915018] env[61999]: DEBUG nova.network.neutron [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.927588] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing aggregate associations for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37, aggregates: None {{(pid=61999) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 544.958544] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Refreshing trait associations for resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61999) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 544.983315] env[61999]: DEBUG nova.network.neutron [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Successfully created port: 487d81f1-b253-49ab-8342-e8c1ee257638 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.067157] env[61999]: DEBUG oslo_concurrency.lockutils [None req-adbbbe88-651c-4161-a26c-ed53e9c6abcb tempest-ServerShowV254Test-1457376511 tempest-ServerShowV254Test-1457376511-project-member] Acquiring lock "c7655a1c-ad5e-4e09-a0af-84f11974a58b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.067431] env[61999]: DEBUG oslo_concurrency.lockutils [None req-adbbbe88-651c-4161-a26c-ed53e9c6abcb tempest-ServerShowV254Test-1457376511 tempest-ServerShowV254Test-1457376511-project-member] Lock "c7655a1c-ad5e-4e09-a0af-84f11974a58b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.277960] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686ca62-2092-4a36-ba3d-4251be2809f6 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.288481] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6cfe36-96b4-4b0c-97aa-e5747d10286c {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.330290] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f855b19-4402-4574-92e4-c8b9809fb6f8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.346568] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e354974-aa24-4d84-a4db-4fed251a2105 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.369683] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 545.402022] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Releasing lock "refresh_cache-410c01fe-54f7-4e39-8689-aa2dbe7f1c7c" {{(pid=61999) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.402022] env[61999]: DEBUG nova.compute.manager [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 410c01fe-54f7-4e39-8689-aa2dbe7f1c7c] Received event network-vif-deleted-d916bee3-a21b-433c-a05c-bd648929118d {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 545.402022] env[61999]: DEBUG nova.compute.manager [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Received event network-changed-7557fb5c-a2d3-4172-82a6-20fab428f77e {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 545.402022] env[61999]: DEBUG nova.compute.manager [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Refreshing instance network info cache due to event network-changed-7557fb5c-a2d3-4172-82a6-20fab428f77e. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 545.402022] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Acquiring lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.418685] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Releasing lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.419122] env[61999]: DEBUG nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 545.419316] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.419712] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Acquired lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.419916] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Refreshing network info cache for port 7557fb5c-a2d3-4172-82a6-20fab428f77e {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 545.421165] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12e1903e-4059-46c8-8882-06bad08fbb41 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.439847] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdd1a7b-df47-42a3-8b4b-0b07d41b870b {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.465984] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4736bfe3-2a45-4fd8-8777-9ab1d2800197 could not be found. [ 545.465984] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.465984] env[61999]: INFO nova.compute.manager [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Took 0.05 seconds to destroy the instance on the hypervisor. [ 545.465984] env[61999]: DEBUG oslo.service.loopingcall [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.465984] env[61999]: DEBUG nova.compute.manager [-] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 545.465984] env[61999]: DEBUG nova.network.neutron [-] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 545.514207] env[61999]: DEBUG nova.network.neutron [-] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.940192] env[61999]: DEBUG nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updated inventory for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with generation 27 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 545.940450] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating resource provider dee5b0a7-9732-42d5-93c0-6b719a790f37 generation from 27 to 28 during operation: update_inventory {{(pid=61999) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 545.940630] env[61999]: DEBUG nova.compute.provider_tree [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Updating inventory in ProviderTree for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 546.016491] env[61999]: DEBUG nova.network.neutron [-] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.100644] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.457282] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 4.114s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.457282] env[61999]: ERROR nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Traceback (most recent call last): [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.driver.spawn(context, instance, image_meta, [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.457282] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] vm_ref = self.build_virtual_machine(instance, [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] for vif in network_info: [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self._sync_wrapper(fn, *args, **kwargs) [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.wait() [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.457900] 
env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self[:] = self._gt.wait() [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self._exit_event.wait() [ 546.457900] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] result = hub.switch() [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return self.greenlet.switch() [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] result = function(*args, **kwargs) [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] return func(*args, **kwargs) [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise e [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] nwinfo = self.network_api.allocate_for_instance( [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.458315] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] created_port_ids = self._update_ports_for_instance( [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] with excutils.save_and_reraise_exception(): [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] self.force_reraise() [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise self.value [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] updated_port = self._update_port( [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] _ensure_no_port_binding_failure(port) [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.458761] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] raise exception.PortBindingFailed(port_id=port['id']) [ 546.459612] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] nova.exception.PortBindingFailed: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. [ 546.459612] env[61999]: ERROR nova.compute.manager [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] [ 546.459612] env[61999]: DEBUG nova.compute.utils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 546.459612] env[61999]: DEBUG oslo_concurrency.lockutils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.614s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.464544] env[61999]: INFO nova.compute.claims [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.474551] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Build of instance 91c68a8b-8a6b-4e31-8b76-4da94fa66748 was re-scheduled: Binding failed for port 1adcc44d-200e-4a63-9f0d-09ff6428bad7, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 546.474551] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 546.474551] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquiring lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.474551] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Acquired lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.474961] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.519597] env[61999]: INFO nova.compute.manager [-] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Took 1.05 seconds to deallocate network for instance. [ 546.521839] env[61999]: DEBUG nova.compute.claims [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 546.522095] env[61999]: DEBUG oslo_concurrency.lockutils [None req-8c093a83-7866-47f6-8172-07688aee55e1 tempest-VolumesAssistedSnapshotsTest-1474997295 tempest-VolumesAssistedSnapshotsTest-1474997295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.535181] env[61999]: DEBUG nova.network.neutron [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.042164] env[61999]: DEBUG oslo_concurrency.lockutils [req-1079efed-f827-4255-b083-9f39ca2d1dad req-db2e1dd9-ce40-403b-9f3f-1495951cad72 service nova] Releasing lock "refresh_cache-4736bfe3-2a45-4fd8-8777-9ab1d2800197" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.069791] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.283527] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.530201] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.530201] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.531237] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.531237] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.531816] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.537449] env[61999]: INFO nova.compute.manager [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Terminating instance [ 547.653968] env[61999]: DEBUG oslo_concurrency.lockutils [None req-0ddff05e-a482-4f92-b9dd-74246b36e019 tempest-ServerActionsTestOtherB-241974746 tempest-ServerActionsTestOtherB-241974746-project-member] Acquiring lock "fb0e3710-4e0d-413b-bd2b-e23b66dcb6b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.653968] env[61999]: DEBUG oslo_concurrency.lockutils [None 
req-0ddff05e-a482-4f92-b9dd-74246b36e019 tempest-ServerActionsTestOtherB-241974746 tempest-ServerActionsTestOtherB-241974746-project-member] Lock "fb0e3710-4e0d-413b-bd2b-e23b66dcb6b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.788259] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Releasing lock "refresh_cache-91c68a8b-8a6b-4e31-8b76-4da94fa66748" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.788259] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 547.788259] env[61999]: DEBUG nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 547.788498] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.814669] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.844620] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62255d4c-6654-4f29-af32-f71f0291236a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.853231] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5917f5-da65-4de8-9da7-d0080c134207 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.888528] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eaad0b8-6042-43b4-aaf8-7a3ada17bcb8 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.900738] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba79078-cf5f-4985-bd16-b9d243e6227f {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.924125] env[61999]: DEBUG nova.compute.provider_tree [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.927775] env[61999]: DEBUG nova.compute.manager [req-5c80044a-7486-40e1-a04a-50df1427b44c req-12f1508b-3dd7-49fb-8dab-212de1c725ea service nova] [instance: 4736bfe3-2a45-4fd8-8777-9ab1d2800197] Received event network-vif-deleted-7557fb5c-a2d3-4172-82a6-20fab428f77e {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 548.051685] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "refresh_cache-1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.051999] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquired lock "refresh_cache-1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.052918] env[61999]: DEBUG nova.network.neutron [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.319142] env[61999]: DEBUG nova.network.neutron [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.428274] env[61999]: DEBUG nova.scheduler.client.report [None req-786a7940-de31-479d-8362-41fd0dbd8c73 
tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 548.600412] env[61999]: DEBUG oslo_concurrency.lockutils [None req-16949462-e2e7-46d9-9ff7-68a5808c9f7f tempest-TenantUsagesTestJSON-177729790 tempest-TenantUsagesTestJSON-177729790-project-member] Acquiring lock "6c6e0e0f-82a1-487e-b9cd-9df405b83cbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.600412] env[61999]: DEBUG oslo_concurrency.lockutils [None req-16949462-e2e7-46d9-9ff7-68a5808c9f7f tempest-TenantUsagesTestJSON-177729790 tempest-TenantUsagesTestJSON-177729790-project-member] Lock "6c6e0e0f-82a1-487e-b9cd-9df405b83cbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.633183] env[61999]: DEBUG oslo_concurrency.lockutils [None req-dbff31a2-46dc-4fdc-9323-e8c842b321ea tempest-MultipleCreateTestJSON-1091114335 tempest-MultipleCreateTestJSON-1091114335-project-member] Acquiring lock "b776b673-08f4-4d0e-9c89-129e006369a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.633510] env[61999]: DEBUG oslo_concurrency.lockutils [None req-dbff31a2-46dc-4fdc-9323-e8c842b321ea tempest-MultipleCreateTestJSON-1091114335 tempest-MultipleCreateTestJSON-1091114335-project-member] Lock "b776b673-08f4-4d0e-9c89-129e006369a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.665759] env[61999]: DEBUG oslo_concurrency.lockutils [None req-dbff31a2-46dc-4fdc-9323-e8c842b321ea tempest-MultipleCreateTestJSON-1091114335 tempest-MultipleCreateTestJSON-1091114335-project-member] Acquiring lock "8f7c86ba-13d1-4c69-9566-0441e3609405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.666602] env[61999]: DEBUG oslo_concurrency.lockutils [None req-dbff31a2-46dc-4fdc-9323-e8c842b321ea tempest-MultipleCreateTestJSON-1091114335 tempest-MultipleCreateTestJSON-1091114335-project-member] Lock "8f7c86ba-13d1-4c69-9566-0441e3609405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.765151] env[61999]: DEBUG nova.network.neutron [None 
req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.826033] env[61999]: INFO nova.compute.manager [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] [instance: 91c68a8b-8a6b-4e31-8b76-4da94fa66748] Took 1.04 seconds to deallocate network for instance. [ 548.929374] env[61999]: ERROR nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 487d81f1-b253-49ab-8342-e8c1ee257638, please check neutron logs for more information. [ 548.929374] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.929374] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.929374] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.929374] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.929374] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.929374] env[61999]: ERROR nova.compute.manager raise self.value [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.929374] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 548.929374] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.929374] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 548.929979] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.929979] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 548.929979] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 487d81f1-b253-49ab-8342-e8c1ee257638, please check neutron logs for more information. 
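The PortBindingFailed tracebacks above all funnel through the same guard: after Nova updates a port, it inspects the binding state Neutron reports and aborts the build if binding failed, which is what later drives the claim abort and re-schedule seen for these instances. A simplified sketch of that kind of check follows; it assumes only the standard 'binding:vif_type' attribute on a Neutron port dict, and the helper name and local exception class are illustrative stand-ins rather than the actual Nova code.

# Illustrative sketch only: reject a Neutron port whose binding failed.
# Real Nova raises nova.exception.PortBindingFailed; this local exception and
# helper name are assumptions made for the example.
class PortBindingFailed(Exception):
    def __init__(self, port_id: str):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_port_bound(port: dict) -> None:
    # Neutron marks a failed binding by setting binding:vif_type to
    # 'binding_failed'; anything else is treated as bound or still unbound.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])


# Usage sketch with a hypothetical port dict:
# ensure_port_bound({"id": "some-port-uuid", "binding:vif_type": "binding_failed"})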
[ 548.929979] env[61999]: ERROR nova.compute.manager [ 548.929979] env[61999]: Traceback (most recent call last): [ 548.929979] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 548.929979] env[61999]: listener.cb(fileno) [ 548.929979] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.929979] env[61999]: result = function(*args, **kwargs) [ 548.929979] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.929979] env[61999]: return func(*args, **kwargs) [ 548.929979] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 548.929979] env[61999]: raise e [ 548.929979] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.929979] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 548.929979] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.929979] env[61999]: created_port_ids = self._update_ports_for_instance( [ 548.929979] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.929979] env[61999]: with excutils.save_and_reraise_exception(): [ 548.929979] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.929979] env[61999]: self.force_reraise() [ 548.929979] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.929979] env[61999]: raise self.value [ 548.929979] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.929979] env[61999]: updated_port = self._update_port( [ 548.929979] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.929979] env[61999]: _ensure_no_port_binding_failure(port) [ 548.929979] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.929979] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 548.930902] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 487d81f1-b253-49ab-8342-e8c1ee257638, please check neutron logs for more information. [ 548.930902] env[61999]: Removing descriptor: 16 [ 548.930902] env[61999]: ERROR nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 487d81f1-b253-49ab-8342-e8c1ee257638, please check neutron logs for more information. 
[ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Traceback (most recent call last): [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] yield resources [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] self.driver.spawn(context, instance, image_meta, [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.930902] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] vm_ref = self.build_virtual_machine(instance, [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] for vif in network_info: [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] return self._sync_wrapper(fn, *args, **kwargs) [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] self.wait() [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] self[:] = self._gt.wait() [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] return self._exit_event.wait() [ 548.932603] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.933100] env[61999]: ERROR 
nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] result = hub.switch() [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] return self.greenlet.switch() [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] result = function(*args, **kwargs) [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] return func(*args, **kwargs) [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] raise e [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] nwinfo = self.network_api.allocate_for_instance( [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.933100] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] created_port_ids = self._update_ports_for_instance( [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] with excutils.save_and_reraise_exception(): [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] self.force_reraise() [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] raise self.value [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] updated_port = self._update_port( [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.933531] 
env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] _ensure_no_port_binding_failure(port) [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.933531] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] raise exception.PortBindingFailed(port_id=port['id']) [ 548.933868] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] nova.exception.PortBindingFailed: Binding failed for port 487d81f1-b253-49ab-8342-e8c1ee257638, please check neutron logs for more information. [ 548.933868] env[61999]: ERROR nova.compute.manager [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] [ 548.933868] env[61999]: INFO nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Terminating instance [ 548.935723] env[61999]: DEBUG oslo_concurrency.lockutils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.935942] env[61999]: DEBUG nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Start building networks asynchronously for instance. 
{{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 548.942169] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.946s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.970177] env[61999]: DEBUG nova.network.neutron [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.305024] env[61999]: DEBUG oslo_concurrency.lockutils [None req-4be901f1-8436-4925-93fc-4ee92588d332 tempest-ServerTagsTestJSON-834177378 tempest-ServerTagsTestJSON-834177378-project-member] Acquiring lock "b2c9ddb4-5c0f-40b2-b876-fcead2df3e24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.305143] env[61999]: DEBUG oslo_concurrency.lockutils [None req-4be901f1-8436-4925-93fc-4ee92588d332 tempest-ServerTagsTestJSON-834177378 tempest-ServerTagsTestJSON-834177378-project-member] Lock "b2c9ddb4-5c0f-40b2-b876-fcead2df3e24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.439872] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Acquiring lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.440104] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Acquired lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.440319] env[61999]: DEBUG nova.network.neutron [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.448208] env[61999]: DEBUG nova.compute.utils [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Using /dev/sd instead of None {{(pid=61999) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.453138] env[61999]: DEBUG nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 
92b4eec6-d38d-465e-a575-62f900400f7c] Allocating IP information in the background. {{(pid=61999) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 549.454198] env[61999]: DEBUG nova.network.neutron [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] allocate_for_instance() {{(pid=61999) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.475725] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Releasing lock "refresh_cache-1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.476141] env[61999]: DEBUG nova.compute.manager [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Start destroying the instance on the hypervisor. {{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 549.476324] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 549.477253] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503f1c73-2993-49aa-a966-b47c55b113ae {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.490191] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powering off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 549.490323] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8656a8aa-0035-4aa5-91ff-1ca83e5b9de4 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.502249] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 549.502249] env[61999]: value = "task-1043210" [ 549.502249] env[61999]: _type = "Task" [ 549.502249] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.513206] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043210, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.541681] env[61999]: DEBUG nova.policy [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f59f02e1da345b998dd57272f545836', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b038c78e66e4b7babf6bf285e4bfb4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61999) authorize /opt/stack/nova/nova/policy.py:201}} [ 549.861673] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ee866d-543b-4480-95bb-28ec04093349 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.868676] env[61999]: INFO nova.scheduler.client.report [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Deleted allocations for instance 91c68a8b-8a6b-4e31-8b76-4da94fa66748 [ 549.880327] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a0762e-66b6-43f0-ad9b-62b727d0ea8d {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.919526] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5615f555-83fd-497a-8a2a-75209034ea52 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.928757] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731b35ee-7dbd-4842-8dfb-1227f4669273 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.949212] env[61999]: DEBUG nova.compute.provider_tree [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.955271] env[61999]: DEBUG nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Start building block device mappings for instance. {{(pid=61999) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 549.982802] env[61999]: DEBUG nova.network.neutron [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.013375] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043210, 'name': PowerOffVM_Task, 'duration_secs': 0.137089} completed successfully. {{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.013650] env[61999]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Powered off the VM {{(pid=61999) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 550.013814] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistering the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 550.014074] env[61999]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3115335-1acb-4804-a8fd-fe772aafe2ee {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.038968] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Unregistered the VM {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 550.039263] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleting contents of the VM from datastore datastore2 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 550.039382] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleting the datastore file [datastore2] 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5 {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 550.039649] env[61999]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2749fda7-e5d2-4220-9012-840ee9f44915 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.048873] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for the task: (returnval){ [ 550.048873] env[61999]: value = "task-1043212" [ 550.048873] env[61999]: _type = "Task" [ 550.048873] env[61999]: } to complete. {{(pid=61999) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.056730] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043212, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.197791] env[61999]: DEBUG nova.network.neutron [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.285580] env[61999]: DEBUG oslo_concurrency.lockutils [None req-a04f3313-2f6e-41fa-8e84-2ce69a55149a tempest-InstanceActionsV221TestJSON-1129020603 tempest-InstanceActionsV221TestJSON-1129020603-project-member] Acquiring lock "7396d67b-9a5f-461d-be85-5b4afe1637f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.285580] env[61999]: DEBUG oslo_concurrency.lockutils [None req-a04f3313-2f6e-41fa-8e84-2ce69a55149a tempest-InstanceActionsV221TestJSON-1129020603 tempest-InstanceActionsV221TestJSON-1129020603-project-member] Lock "7396d67b-9a5f-461d-be85-5b4afe1637f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.304618] env[61999]: DEBUG nova.network.neutron [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Successfully created port: 249e0936-1be8-45ad-bf30-ede48f38b5d5 {{(pid=61999) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.377545] env[61999]: DEBUG oslo_concurrency.lockutils [None req-be5d2708-bf42-4928-ad19-4987f36841e5 tempest-ServersAdminNegativeTestJSON-401293581 tempest-ServersAdminNegativeTestJSON-401293581-project-member] Lock "91c68a8b-8a6b-4e31-8b76-4da94fa66748" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.298s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.455026] env[61999]: DEBUG nova.scheduler.client.report [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 550.557622] env[61999]: DEBUG oslo_vmware.api [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Task: {'id': task-1043212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101219} completed successfully. 
{{(pid=61999) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.559960] env[61999]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Deleted the datastore file {{(pid=61999) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 550.559960] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deleted contents of the VM from datastore datastore2 {{(pid=61999) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 550.559960] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.559960] env[61999]: INFO nova.compute.manager [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 550.559960] env[61999]: DEBUG oslo.service.loopingcall [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.560213] env[61999]: DEBUG nova.compute.manager [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 550.560213] env[61999]: DEBUG nova.network.neutron [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.598864] env[61999]: DEBUG nova.network.neutron [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.701142] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Releasing lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.701705] env[61999]: DEBUG nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Start destroying the instance on the hypervisor. 
{{(pid=61999) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 550.701826] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Destroying instance {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 550.702045] env[61999]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e456cba-f83f-41cf-84eb-ef3aea9bb3d0 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.711141] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dda5d29-ce55-462b-9d6f-7c64a0a8c516 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.746978] env[61999]: WARNING nova.virt.vmwareapi.vmops [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 491c456d-5e90-46ff-80cf-6cda1d8f657a could not be found. [ 550.746978] env[61999]: DEBUG nova.virt.vmwareapi.vmops [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance destroyed {{(pid=61999) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.746978] env[61999]: INFO nova.compute.manager [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 550.747240] env[61999]: DEBUG oslo.service.loopingcall [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61999) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.751027] env[61999]: DEBUG nova.compute.manager [-] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 550.751027] env[61999]: DEBUG nova.network.neutron [-] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.763373] env[61999]: DEBUG nova.compute.manager [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Received event network-changed-487d81f1-b253-49ab-8342-e8c1ee257638 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 550.763476] env[61999]: DEBUG nova.compute.manager [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Refreshing instance network info cache due to event network-changed-487d81f1-b253-49ab-8342-e8c1ee257638. {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11609}} [ 550.763673] env[61999]: DEBUG oslo_concurrency.lockutils [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] Acquiring lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.763813] env[61999]: DEBUG oslo_concurrency.lockutils [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] Acquired lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.764210] env[61999]: DEBUG nova.network.neutron [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Refreshing network info cache for port 487d81f1-b253-49ab-8342-e8c1ee257638 {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 550.789854] env[61999]: DEBUG nova.network.neutron [-] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.880057] env[61999]: DEBUG nova.compute.manager [None req-0ec6147b-3cf8-4631-bc91-1aed32e1dceb tempest-VolumesAdminNegativeTest-1334958800 tempest-VolumesAdminNegativeTest-1334958800-project-member] [instance: 50dd7faa-07d5-4f9b-89e2-da387d10a115] Starting instance... 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 550.957223] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.019s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.958031] env[61999]: ERROR nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Traceback (most recent call last): [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.driver.spawn(context, instance, image_meta, [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] vm_ref = self.build_virtual_machine(instance, [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.958031] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] for vif in network_info: [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self._sync_wrapper(fn, *args, **kwargs) [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.wait() [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.959327] env[61999]: ERROR 
nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self[:] = self._gt.wait() [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self._exit_event.wait() [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] result = hub.switch() [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.959327] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return self.greenlet.switch() [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] result = function(*args, **kwargs) [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] return func(*args, **kwargs) [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise e [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] nwinfo = self.network_api.allocate_for_instance( [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] created_port_ids = self._update_ports_for_instance( [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] with excutils.save_and_reraise_exception(): [ 550.959733] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] self.force_reraise() [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise self.value [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] updated_port = self._update_port( [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] _ensure_no_port_binding_failure(port) [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] raise exception.PortBindingFailed(port_id=port['id']) [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] nova.exception.PortBindingFailed: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. [ 550.960106] env[61999]: ERROR nova.compute.manager [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] [ 550.960462] env[61999]: DEBUG nova.compute.utils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.960462] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.945s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.963938] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Build of instance bf8cf38f-4305-4f7b-a262-e3e7e863f3d1 was re-scheduled: Binding failed for port 2b139f42-4096-4b41-9635-757e2d880205, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 550.964726] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 550.964726] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquiring lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.964904] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Acquired lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.964972] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.967093] env[61999]: DEBUG nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Start spawning the instance on the hypervisor. 
{{(pid=61999) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 550.998009] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T17:06:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T17:06:09Z,direct_url=,disk_format='vmdk',id=9eed6d08-b529-4317-89cc-ae13b2d60cea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='df2c6d94ccc84e66846b70583d33a039',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T17:06:10Z,virtual_size=,visibility=), allow threads: False {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 550.998273] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Flavor limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.998460] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Image limits 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 550.998689] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Flavor pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.998841] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Image pref 0:0:0 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 550.998986] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61999) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 550.999861] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 551.000055] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 551.000232] env[61999]: DEBUG 
nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Got 1 possible topologies {{(pid=61999) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 551.000398] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 551.000574] env[61999]: DEBUG nova.virt.hardware [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61999) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 551.001595] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1a332c-de7f-4547-ba86-91b34bb20934 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.011882] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbcc813-f813-4db3-a368-a008709e2476 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.101465] env[61999]: DEBUG nova.network.neutron [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.294849] env[61999]: DEBUG nova.network.neutron [-] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.299589] env[61999]: DEBUG nova.network.neutron [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.414631] env[61999]: DEBUG oslo_concurrency.lockutils [None req-0ec6147b-3cf8-4631-bc91-1aed32e1dceb tempest-VolumesAdminNegativeTest-1334958800 tempest-VolumesAdminNegativeTest-1334958800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.532375] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance cache missing network info. 
{{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.554980] env[61999]: DEBUG nova.network.neutron [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.607841] env[61999]: INFO nova.compute.manager [-] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Took 1.05 seconds to deallocate network for instance. [ 551.782674] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.802435] env[61999]: INFO nova.compute.manager [-] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Took 1.05 seconds to deallocate network for instance. [ 551.808355] env[61999]: DEBUG nova.compute.claims [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Aborting claim: {{(pid=61999) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.808587] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e607b1c0-db61-4d22-82ba-e4310a20672a tempest-ServersWithSpecificFlavorTestJSON-361710550 tempest-ServersWithSpecificFlavorTestJSON-361710550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.847643] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd43c61f-89c0-4287-b181-946eefc2f77a {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.855324] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc68ed9a-31b4-4fe2-8124-681469c6c4b5 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.887133] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb96f9b-d4bc-4936-8936-458c2f1292a0 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.895549] env[61999]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0caa9b-af98-47cc-80bc-41b9f3459ed7 {{(pid=61999) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.911866] env[61999]: DEBUG nova.compute.provider_tree [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Inventory has not changed in ProviderTree for provider: dee5b0a7-9732-42d5-93c0-6b719a790f37 {{(pid=61999) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.058243] env[61999]: DEBUG oslo_concurrency.lockutils [req-862f7894-55bb-482b-9d0b-7020c17157e1 
req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] Releasing lock "refresh_cache-491c456d-5e90-46ff-80cf-6cda1d8f657a" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.058734] env[61999]: DEBUG nova.compute.manager [req-862f7894-55bb-482b-9d0b-7020c17157e1 req-d18c2001-0e0f-45fa-bd74-6b8c151abc2a service nova] [instance: 491c456d-5e90-46ff-80cf-6cda1d8f657a] Received event network-vif-deleted-487d81f1-b253-49ab-8342-e8c1ee257638 {{(pid=61999) external_instance_event /opt/stack/nova/nova/compute/manager.py:11604}} [ 552.115975] env[61999]: DEBUG oslo_concurrency.lockutils [None req-6e905c37-b602-445a-b143-bedbcead775a tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.285144] env[61999]: DEBUG oslo_concurrency.lockutils [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Releasing lock "refresh_cache-bf8cf38f-4305-4f7b-a262-e3e7e863f3d1" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.285600] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 552.286864] env[61999]: DEBUG nova.compute.manager [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Deallocating network for instance {{(pid=61999) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 552.286864] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] deallocate_for_instance() {{(pid=61999) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 552.327641] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e2973eb6-97aa-4eb3-8e5b-0bc98994fae0 tempest-AttachInterfacesUnderV243Test-659804474 tempest-AttachInterfacesUnderV243Test-659804474-project-member] Acquiring lock "da9a7498-93d3-4a51-8a7b-f9fbb3d3f7f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.327851] env[61999]: DEBUG oslo_concurrency.lockutils [None req-e2973eb6-97aa-4eb3-8e5b-0bc98994fae0 tempest-AttachInterfacesUnderV243Test-659804474 tempest-AttachInterfacesUnderV243Test-659804474-project-member] Lock "da9a7498-93d3-4a51-8a7b-f9fbb3d3f7f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.332268] env[61999]: DEBUG nova.network.neutron [None 
req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Instance cache missing network info. {{(pid=61999) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.415473] env[61999]: DEBUG nova.scheduler.client.report [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Inventory has not changed for provider dee5b0a7-9732-42d5-93c0-6b719a790f37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61999) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 552.586300] env[61999]: DEBUG oslo_concurrency.lockutils [None req-db286e53-35c6-4742-9f9b-1e72a597747a tempest-SecurityGroupsTestJSON-1414201619 tempest-SecurityGroupsTestJSON-1414201619-project-member] Acquiring lock "01cb7994-570a-4ad6-b8d1-c1fe54afa57d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.586300] env[61999]: DEBUG oslo_concurrency.lockutils [None req-db286e53-35c6-4742-9f9b-1e72a597747a tempest-SecurityGroupsTestJSON-1414201619 tempest-SecurityGroupsTestJSON-1414201619-project-member] Lock "01cb7994-570a-4ad6-b8d1-c1fe54afa57d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.620057] env[61999]: DEBUG oslo_concurrency.lockutils [None req-47aa8926-34a7-4c86-9030-2468ee9be1b2 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Acquiring lock "a98e0990-c773-40e3-98ed-a1c567596d14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.620410] env[61999]: DEBUG oslo_concurrency.lockutils [None req-47aa8926-34a7-4c86-9030-2468ee9be1b2 tempest-MigrationsAdminTest-1188413466 tempest-MigrationsAdminTest-1188413466-project-member] Lock "a98e0990-c773-40e3-98ed-a1c567596d14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.834981] env[61999]: DEBUG nova.network.neutron [None req-84a6e4ba-97b4-40f8-b943-dbb6ee183f72 tempest-AttachInterfacesTestJSON-2050382962 tempest-AttachInterfacesTestJSON-2050382962-project-member] [instance: bf8cf38f-4305-4f7b-a262-e3e7e863f3d1] Updating instance_info_cache with network_info: [] {{(pid=61999) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.922751] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 
tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.963s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.923501] env[61999]: ERROR nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Traceback (most recent call last): [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.driver.spawn(context, instance, image_meta, [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] vm_ref = self.build_virtual_machine(instance, [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.923501] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] for vif in network_info: [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self._sync_wrapper(fn, *args, **kwargs) [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.wait() [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self[:] = self._gt.wait() [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self._exit_event.wait() [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] result = hub.switch() [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.923912] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return self.greenlet.switch() [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] result = function(*args, **kwargs) [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] return func(*args, **kwargs) [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise e [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] nwinfo = self.network_api.allocate_for_instance( [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] created_port_ids = self._update_ports_for_instance( [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] with excutils.save_and_reraise_exception(): [ 552.924363] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] self.force_reraise() [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise self.value [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 
75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] updated_port = self._update_port( [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] _ensure_no_port_binding_failure(port) [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] raise exception.PortBindingFailed(port_id=port['id']) [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] nova.exception.PortBindingFailed: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. [ 552.924802] env[61999]: ERROR nova.compute.manager [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] [ 552.925184] env[61999]: DEBUG nova.compute.utils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. {{(pid=61999) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 552.927463] env[61999]: DEBUG oslo_concurrency.lockutils [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.196s {{(pid=61999) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.927463] env[61999]: DEBUG nova.objects.instance [None req-cc4d784f-539a-41b4-8a0a-654d721ae2c0 tempest-ServersAdmin275Test-268209774 tempest-ServersAdmin275Test-268209774-project-member] [instance: 1f23e4c8-c961-4bb0-8726-fe5bd3a9b2d5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61999) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 552.933859] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Build of instance 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e was re-scheduled: Binding failed for port cd1d24a7-cc00-4b0a-b5b7-2ac6a432af1b, please check neutron logs for more information. 
{{(pid=61999) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 552.934330] env[61999]: DEBUG nova.compute.manager [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Unplugging VIFs for instance {{(pid=61999) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 552.934576] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquiring lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.934737] env[61999]: DEBUG oslo_concurrency.lockutils [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] Acquired lock "refresh_cache-75dafb5a-1a6d-4fdf-9e35-f1d7a213422e" {{(pid=61999) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.934896] env[61999]: DEBUG nova.network.neutron [None req-01983e09-0b44-448b-90aa-9a4f91ffb324 tempest-ServerDiagnosticsNegativeTest-940942930 tempest-ServerDiagnosticsNegativeTest-940942930-project-member] [instance: 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e] Building network info cache for instance {{(pid=61999) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.207164] env[61999]: ERROR nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 249e0936-1be8-45ad-bf30-ede48f38b5d5, please check neutron logs for more information. 
[ 553.207164] env[61999]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.207164] env[61999]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.207164] env[61999]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.207164] env[61999]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.207164] env[61999]: ERROR nova.compute.manager self.force_reraise() [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.207164] env[61999]: ERROR nova.compute.manager raise self.value [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.207164] env[61999]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.207164] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.207164] env[61999]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.207754] env[61999]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.207754] env[61999]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.207754] env[61999]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 249e0936-1be8-45ad-bf30-ede48f38b5d5, please check neutron logs for more information. 
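Editor's note: the traceback that ends here, and the near-identical ones logged for instances 75dafb5a-1a6d-4fdf-9e35-f1d7a213422e and 92b4eec6-d38d-465e-a575-62f900400f7c, all bottom out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed when Neutron returns a port whose binding failed; the force_reraise() / "raise self.value" frames come from oslo.utils re-raising the original exception after cleanup. The following is a minimal, self-contained sketch of that pattern, not Nova's actual source; the exception class and helper are simplified stand-ins that only mirror the names shown in the log.

# Minimal sketch of the failure pattern in the tracebacks above.
# Not Nova's actual source; simplified stand-ins for illustration.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding by returning the port with
    # binding:vif_type set to 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        try:
            _ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
        except Exception:
            # save_and_reraise_exception() re-raises the original
            # exception when this block exits; its __exit__ calls
            # force_reraise(), which is why "force_reraise" and
            # "raise self.value" appear in the tracebacks.
            with excutils.save_and_reraise_exception():
                pass  # cleanup of already-created ports would go here
    return created_port_ids


# Example: one healthy port and one whose binding failed.
ports = [
    {'id': 'aaaa', 'binding:vif_type': 'ovs'},
    {'id': '249e0936-1be8-45ad-bf30-ede48f38b5d5',
     'binding:vif_type': 'binding_failed'},
]
try:
    update_ports_for_instance(ports)
except PortBindingFailed as exc:
    print(exc)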
[ 553.207754] env[61999]: ERROR nova.compute.manager [ 553.207754] env[61999]: Traceback (most recent call last): [ 553.207754] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.207754] env[61999]: listener.cb(fileno) [ 553.207754] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.207754] env[61999]: result = function(*args, **kwargs) [ 553.207754] env[61999]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.207754] env[61999]: return func(*args, **kwargs) [ 553.207754] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.207754] env[61999]: raise e [ 553.207754] env[61999]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.207754] env[61999]: nwinfo = self.network_api.allocate_for_instance( [ 553.207754] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.207754] env[61999]: created_port_ids = self._update_ports_for_instance( [ 553.207754] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.207754] env[61999]: with excutils.save_and_reraise_exception(): [ 553.207754] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.207754] env[61999]: self.force_reraise() [ 553.207754] env[61999]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.207754] env[61999]: raise self.value [ 553.207754] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.207754] env[61999]: updated_port = self._update_port( [ 553.207754] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.207754] env[61999]: _ensure_no_port_binding_failure(port) [ 553.207754] env[61999]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.207754] env[61999]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.208631] env[61999]: nova.exception.PortBindingFailed: Binding failed for port 249e0936-1be8-45ad-bf30-ede48f38b5d5, please check neutron logs for more information. [ 553.208631] env[61999]: Removing descriptor: 18 [ 553.208631] env[61999]: ERROR nova.compute.manager [None req-786a7940-de31-479d-8362-41fd0dbd8c73 tempest-ServerDiskConfigTestJSON-405309098 tempest-ServerDiskConfigTestJSON-405309098-project-member] [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 249e0936-1be8-45ad-bf30-ede48f38b5d5, please check neutron logs for more information. 
[ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] Traceback (most recent call last): [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] yield resources [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] self.driver.spawn(context, instance, image_meta, [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.208631] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] vm_ref = self.build_virtual_machine(instance, [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] for vif in network_info: [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] return self._sync_wrapper(fn, *args, **kwargs) [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] self.wait() [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] self[:] = self._gt.wait() [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] return self._exit_event.wait() [ 553.208983] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.209457] env[61999]: ERROR 
nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] result = hub.switch() [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] return self.greenlet.switch() [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] result = function(*args, **kwargs) [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] return func(*args, **kwargs) [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] raise e [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] nwinfo = self.network_api.allocate_for_instance( [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.209457] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] created_port_ids = self._update_ports_for_instance( [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] with excutils.save_and_reraise_exception(): [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] self.force_reraise() [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] raise self.value [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] updated_port = self._update_port( [ 553.209847] env[61999]: ERROR nova.compute.manager [instance: 92b4eec6-d38d-465e-a575-62f900400f7c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.209847] 
env[61999]:
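Editor's note: the paired "Acquiring lock ..." / "Lock ... acquired by ... :: waited N s" / "Lock ... released ... :: held N s" DEBUG lines throughout this section come from oslo.concurrency's lockutils wrapping callables such as ResourceTracker.abort_instance_claim, the refresh_cache-<instance uuid> locks, and the per-instance build locks. Below is a minimal sketch of that decorator pattern, assuming a process-local (non-external) lock; the toy class, method names, and stored data are illustrative only, not Nova's.

# Illustrative only: a process-local lock in the style of the
# "compute_resources" lock seen above. Requires oslo.concurrency;
# the acquire/release timing lines are emitted at DEBUG level.
from oslo_concurrency import lockutils


class ToyResourceTracker:
    def __init__(self):
        self.claims = {}

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, instance_uuid):
        # Callers that arrive while another thread holds the lock
        # block here; lockutils logs how long they waited and, on
        # return, how long the lock was held.
        self.claims.pop(instance_uuid, None)

    @lockutils.synchronized('compute_resources')
    def update_usage(self, instance_uuid, usage):
        self.claims[instance_uuid] = usage


tracker = ToyResourceTracker()
tracker.update_usage('491c456d-5e90-46ff-80cf-6cda1d8f657a', {'vcpus': 1})
tracker.abort_instance_claim('491c456d-5e90-46ff-80cf-6cda1d8f657a')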