[ 490.145411] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 490.145758] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 490.145888] env[62109]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62109) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 490.146202] env[62109]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 490.233886] env[62109]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62109) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 490.242965] env[62109]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=62109) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 490.288918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Creating reply queue: reply_7522b64acfeb4981b1f36928b040d568
[ 490.299099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Expecting reply to msg 7ac8374de69e410dade1c107b51a90df in queue reply_7522b64acfeb4981b1f36928b040d568
[ 490.314025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ac8374de69e410dade1c107b51a90df
[ 490.848235] env[62109]: INFO nova.virt.driver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 490.918215] env[62109]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 490.918362] env[62109]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 490.918461] env[62109]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62109) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 494.038818] env[62109]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-de9d3c5a-9c6e-4363-b8df-bb8df106adf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.055496] env[62109]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62109) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 494.055640] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-9b2b7cbf-a53d-4175-b780-9b66457e728e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.079863] env[62109]: INFO oslo_vmware.api [-] Successfully established new session; session ID is f494d.
[ 494.080039] env[62109]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.162s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.080591] env[62109]: INFO nova.virt.vmwareapi.driver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] VMware vCenter version: 7.0.3
[ 494.083913] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ac3827-e68c-4dca-a9af-35dc610c462b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.101255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d98e1d-d44f-4847-aae3-e616558ed4fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.107358] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e8c413-7062-4273-8ebf-5bbb5ade5cf0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.113928] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c797cc-670c-4888-ad39-54254956027a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.126842] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c275bfa8-6d62-4947-8da8-c360d8fc7776 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.132944] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd71c53-59e5-492c-8148-7fabea1420dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.163399] env[62109]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3c6af044-58b0-4d14-9c84-6d5c41d28937 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 494.168890] env[62109]: DEBUG nova.virt.vmwareapi.driver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Extension org.openstack.compute already exists. {{(pid=62109) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 494.171546] env[62109]: INFO nova.compute.provider_config [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 494.172232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Expecting reply to msg 7124c1f05d9444f8bc5b6225528e8392 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 494.189583] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7124c1f05d9444f8bc5b6225528e8392
[ 494.674981] env[62109]: DEBUG nova.context [None req-e5d98315-c318-479c-a5cc-fda38ea6c2ba None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),89980bcd-ea65-4338-957c-d9d250569837(cell1) {{(pid=62109) load_cells /opt/stack/nova/nova/context.py:464}}
[ 494.677114] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.677337] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.678094] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.678689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Acquiring lock "89980bcd-ea65-4338-957c-d9d250569837" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.678813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Lock "89980bcd-ea65-4338-957c-d9d250569837" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.679771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Lock "89980bcd-ea65-4338-957c-d9d250569837" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 494.699705] env[62109]: INFO dbcounter [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Registered counter for database nova_cell0
[ 494.708237] env[62109]: INFO dbcounter [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Registered counter for database nova_cell1
[ 494.711311] env[62109]: DEBUG oslo_db.sqlalchemy.engines [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62109) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 494.711913] env[62109]: DEBUG oslo_db.sqlalchemy.engines [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62109) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 494.716705] env[62109]: ERROR nova.db.main.api [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 494.716705] env[62109]: result = function(*args, **kwargs)
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 494.716705] env[62109]: return func(*args, **kwargs)
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 494.716705] env[62109]: result = fn(*args, **kwargs)
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 494.716705] env[62109]: return f(*args, **kwargs)
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 494.716705] env[62109]: return db.service_get_minimum_version(context, binaries)
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 494.716705] env[62109]: _check_db_access()
[ 494.716705] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 494.716705] env[62109]: stacktrace = ''.join(traceback.format_stack())
[ 494.716705] env[62109]:
[ 494.717501] env[62109]: ERROR nova.db.main.api [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 494.717501] env[62109]: result = function(*args, **kwargs)
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 494.717501] env[62109]: return func(*args, **kwargs)
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 494.717501] env[62109]: result = fn(*args, **kwargs)
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 494.717501] env[62109]: return f(*args, **kwargs)
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 494.717501] env[62109]: return db.service_get_minimum_version(context, binaries)
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 494.717501] env[62109]: _check_db_access()
[ 494.717501] env[62109]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 494.717501] env[62109]: stacktrace = ''.join(traceback.format_stack())
[ 494.717501] env[62109]:
[ 494.718145] env[62109]: WARNING nova.objects.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 494.718145] env[62109]: WARNING nova.objects.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Failed to get minimum service version for cell 89980bcd-ea65-4338-957c-d9d250569837
[ 494.718456] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Acquiring lock "singleton_lock" {{(pid=62109) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 494.718617] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Acquired lock "singleton_lock" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 494.718902] env[62109]: DEBUG oslo_concurrency.lockutils [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Releasing lock "singleton_lock" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 494.719231] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Full set of CONF: {{(pid=62109) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 494.719378] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ******************************************************************************** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 494.719504] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] Configuration options gathered from: {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 494.719638] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 494.719822] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 494.719950] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ================================================================================ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 494.720176] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] allow_resize_to_same_host = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.720344] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] arq_binding_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.720474] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] backdoor_port = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.720600] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] backdoor_socket = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.720798] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] block_device_allocate_retries = 60 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.720973] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] block_device_allocate_retries_interval = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721142] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cert = self.pem {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721309] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721476] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute_monitors = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721637] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] config_dir = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721803] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] config_drive_format = iso9660 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.721937] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722098] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] config_source = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722262] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] console_host = devstack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722423] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] control_exchange = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722578] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cpu_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722737] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] daemon = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.722905] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] debug = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723063] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_access_ip_network_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723226] 
env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_availability_zone = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723380] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_ephemeral_format = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723535] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_green_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723771] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.723934] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] default_schedule_zone = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724104] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] disk_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724263] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] enable_new_services = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724440] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] enabled_apis = ['osapi_compute'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724603] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] enabled_ssl_apis = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724761] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] flat_injected = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.724923] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] force_config_drive = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725083] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] force_raw_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725251] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 
None None] graceful_shutdown_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725413] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] heal_instance_info_cache_interval = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725622] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] host = cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725796] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.725962] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726127] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726327] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726494] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_build_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726656] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_delete_interval = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726823] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_format = [instance: %(uuid)s] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.726989] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_name_template = instance-%08x {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727151] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_usage_audit = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727318] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_usage_audit_period = month {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727483] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727652] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727842] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] internal_service_availability_zone = internal {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.727983] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] key = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.728186] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] live_migration_retry_count = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.728402] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_color = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.728517] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_config_append = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.728683] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.728864] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_dir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729038] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729167] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_options = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729327] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_rotate_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729495] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_rotate_interval_type = days {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729661] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] log_rotation_type = none {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729795] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.729926] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730098] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730265] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730392] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730552] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] long_rpc_timeout = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730714] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_concurrent_builds = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.730872] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_concurrent_live_migrations = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731030] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_concurrent_snapshots = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731185] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_local_block_devices = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731343] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_logfile_count = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731499] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] max_logfile_size_mb = 200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731655] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] maximum_instance_delete_attempts = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731823] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metadata_listen = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.731991] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metadata_listen_port = 8775 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.732188] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metadata_workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.732341] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] migrate_max_retries = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.732507] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] mkisofs_cmd = genisoimage {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.732713] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.732849] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] my_ip = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733014] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] network_allocate_retries = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733192] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733360] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733522] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] osapi_compute_listen_port = 8774 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733688] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] osapi_compute_unique_server_name_scope = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.733853] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] osapi_compute_workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734017] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] password_length = 12 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734175] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] periodic_enable = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734331] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] periodic_fuzzy_delay = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734496] env[62109]: DEBUG 
oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] pointer_model = usbtablet {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734661] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] preallocate_images = none {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734819] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] publish_errors = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.734949] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] pybasedir = /opt/stack/nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735102] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ram_allocation_ratio = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735260] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rate_limit_burst = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735427] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rate_limit_except_level = CRITICAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735586] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rate_limit_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735743] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reboot_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.735898] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reclaim_instance_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736072] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] record = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736244] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reimage_timeout_per_gb = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736410] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] report_interval = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736569] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rescue_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736725] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reserved_host_cpus = 0 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.736884] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reserved_host_disk_mb = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737044] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reserved_host_memory_mb = 512 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737204] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] reserved_huge_pages = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737362] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] resize_confirm_window = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737520] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] resize_fs_using_block_device = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737685] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] resume_guests_state_on_host_boot = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.737873] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738052] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] rpc_response_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738217] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] run_external_periodic_tasks = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738386] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] running_deleted_instance_action = reap {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738548] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738703] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] running_deleted_instance_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.738908] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler_instance_sync_interval = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739120] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_down_time = 720 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739295] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] servicegroup_driver = db {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739458] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] shelved_offload_time = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739616] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] shelved_poll_interval = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739782] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] shutdown_timeout = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.739941] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] source_is_ipv6 = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.740117] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ssl_only = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.740358] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.740526] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] sync_power_state_interval = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.740687] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] sync_power_state_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.740854] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] syslog_log_facility = LOG_USER {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741011] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] tempdir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741171] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] timeout_nbd = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741335] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] transport_url = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741568] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] update_resources_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741749] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_cow_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.741909] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_eventlog = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742075] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_journal = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742230] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_json = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742389] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_rootwrap_daemon = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742544] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_stderr = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742704] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] use_syslog = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.742860] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vcpu_pin_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743029] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plugging_is_fatal = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743196] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plugging_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743361] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] virt_mkfs = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743521] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] volume_usage_poll_interval = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743690] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] watch_log_file = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.743840] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] web = /usr/share/spice-html5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 494.744090] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_concurrency.disable_process_locking = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.744342] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.744522] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.744688] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.744861] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745032] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745196] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745375] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.auth_strategy = keystone {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745538] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.compute_link_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745726] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.745930] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.dhcp_domain = novalocal {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746109] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.enable_instance_password = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746274] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.glance_link_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746444] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746618] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None 
None] api.instance_list_cells_batch_strategy = distributed {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746783] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.instance_list_per_project_cells = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.746946] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.list_records_by_skipping_down_cells = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747110] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.local_metadata_per_cell = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747277] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.max_limit = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747443] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.metadata_cache_expiration = 15 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747614] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.neutron_default_tenant_id = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747782] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.response_validation = warn {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.747986] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.use_neutron_default_nets = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.748179] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.748347] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.748515] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.748693] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.748887] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_dynamic_targets = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749062] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_jsonfile_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749245] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749437] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.backend = dogpile.cache.memcached {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749606] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.backend_argument = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749775] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.config_prefix = cache.oslo {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.749945] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.dead_timeout = 60.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750111] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.debug_cache_backend = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750275] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.enable_retry_client = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750438] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.enable_socket_keepalive = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750606] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.enabled = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750768] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.enforce_fips_mode = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.750932] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.expiration_time = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751096] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.hashclient_retry_attempts = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751262] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751423] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
cache.memcache_dead_retry = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751579] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751742] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.751905] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752082] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_pool_maxsize = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752248] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752410] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_sasl_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752587] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752754] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.752912] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.memcache_username = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753077] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.proxies = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753238] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_db = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753395] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753562] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753736] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_sentinels = 
['localhost:26379'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.753903] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_server = localhost:6379 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754071] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_socket_timeout = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754225] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.redis_username = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754386] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.retry_attempts = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754545] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.retry_delay = 0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754704] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.socket_keepalive_count = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.754863] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.socket_keepalive_idle = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755023] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.socket_keepalive_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755180] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.tls_allowed_ciphers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755336] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.tls_cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755489] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.tls_certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755649] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.tls_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.755835] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cache.tls_keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756016] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.auth_section = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756196] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756355] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756529] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756688] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.756851] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757015] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.cross_az_attach = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757176] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757331] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.endpoint_template = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757491] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.http_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757650] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.757880] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758014] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.os_region_name = RegionOne {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758184] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758344] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cinder.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758518] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758677] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.cpu_dedicated_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.758837] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.cpu_shared_set = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759024] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.image_type_exclude_list = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759194] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759359] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759522] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759687] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.759855] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760050] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.resource_provider_association_refresh = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760195] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760359] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.shutdown_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760539] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760718] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] conductor.workers = 2 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.760894] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] console.allowed_origins = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761055] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] console.ssl_ciphers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761226] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] console.ssl_minimum_version = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761394] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] consoleauth.enforce_session_timeout = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761560] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] consoleauth.token_ttl = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761723] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.761877] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762040] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762195] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762351] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762506] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762667] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762822] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.762981] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763137] env[62109]: DEBUG 
oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763291] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763446] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763599] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763763] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.service_type = accelerator {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.763924] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.764094] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.764255] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.764409] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.764647] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.764926] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] cyborg.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.765234] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.backend = sqlalchemy {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.765531] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.765829] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.connection_debug = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.766129] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
database.connection_parameters = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.766422] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.connection_recycle_time = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.766715] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.connection_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.767004] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.db_inc_retry_interval = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.767296] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.db_max_retries = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.767587] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.db_max_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.767884] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.db_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.768197] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.max_overflow = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.768491] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.max_pool_size = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.768782] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.max_retries = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.769086] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.769375] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.mysql_wsrep_sync_wait = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.769648] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.pool_timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.769943] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.770235] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.slave_connection = **** {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.770456] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.sqlite_synchronous = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.770634] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] database.use_db_reconnect = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.770818] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.backend = sqlalchemy {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.770993] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771163] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.connection_debug = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771336] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.connection_parameters = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771502] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.connection_recycle_time = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771666] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.connection_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771829] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.db_inc_retry_interval = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.771995] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.db_max_retries = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.772177] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.db_max_retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.772345] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.db_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.772510] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.max_overflow = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.772673] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.max_pool_size = 5 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.772835] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.max_retries = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773007] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773166] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773324] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.pool_timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773489] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.retry_interval = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773648] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.slave_connection = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773812] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] api_database.sqlite_synchronous = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.773989] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] devices.enabled_mdev_types = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.774167] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.774339] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.774504] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ephemeral_storage_encryption.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.774667] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.774839] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.api_servers = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775006] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.cafile = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775165] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775331] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775491] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775648] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775812] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.775979] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.default_trusted_certificate_ids = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776160] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.enable_certificate_validation = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776325] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.enable_rbd_download = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776485] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776652] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776815] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.776978] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777135] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777299] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.num_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777467] env[62109]: DEBUG 
oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.rbd_ceph_conf = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777630] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.rbd_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777799] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.rbd_pool = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.777988] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.rbd_user = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778154] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778314] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778472] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778641] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.service_type = image {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778807] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.778985] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.779152] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.779313] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.779495] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.779670] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.verify_glance_signatures = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.779832] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] glance.version = None 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.780073] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] guestfs.debug = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.780186] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] mks.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.780553] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.780745] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.manager_interval = 2400 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.780915] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.precache_concurrency = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781091] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.remove_unused_base_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781262] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781440] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781616] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] image_cache.subdirectory_name = _base {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781794] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.api_max_retries = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.781961] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.api_retry_interval = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782122] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782283] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782439] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.cafile = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782597] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782761] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.782924] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.conductor_group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783084] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783243] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783401] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783563] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783721] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.783879] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784048] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784277] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.peer_list = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784463] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784627] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784793] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.serial_console_state_timeout = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.784954] env[62109]: DEBUG 
oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785123] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.service_type = baremetal {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785283] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.shard = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785446] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785603] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785763] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.785918] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786099] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786259] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ironic.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786441] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786613] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] key_manager.fixed_key = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786796] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.786961] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.barbican_api_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787118] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.barbican_endpoint = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787290] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.barbican_endpoint_type = public {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787449] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.barbican_region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787605] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787760] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.787956] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.788181] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.788303] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.788470] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.number_of_retries = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.788633] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.retry_delay = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.788809] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.send_service_user_token = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789002] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789169] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789338] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.verify_ssl = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789500] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican.verify_ssl_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789668] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.auth_section = None {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789831] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.789990] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790147] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790310] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790471] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790628] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790789] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.790946] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] barbican_service_user.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791111] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.approle_role_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791268] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.approle_secret_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791437] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.kv_mountpoint = secret {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791595] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.kv_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791756] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.kv_version = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.791914] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.namespace = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792085] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.root_token_id = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792246] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.ssl_ca_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792409] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.timeout = 60.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792569] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.use_ssl = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792738] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.792905] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793068] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793226] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793380] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793541] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793697] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.793854] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794012] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794173] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794329] env[62109]: DEBUG oslo_service.service 
[None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794486] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794639] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794795] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.794953] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795109] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795277] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.service_type = identity {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795438] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795594] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795750] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.795904] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796105] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796269] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] keystone.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796471] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.connection_uri = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796632] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_mode = None 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796796] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.796965] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_models = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.797136] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_power_governor_high = performance {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.797305] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.797468] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_power_management = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.797638] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.797827] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.device_detach_attempts = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798001] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.device_detach_timeout = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798170] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.disk_cachemodes = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798329] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.disk_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798492] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.enabled_perf_events = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798653] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.file_backed_memory = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.798838] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.gid_maps = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799016] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.hw_disk_discard = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799180] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.hw_machine_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799385] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_rbd_ceph_conf = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799554] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799721] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.799890] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_rbd_glance_store_name = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800075] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_rbd_pool = rbd {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800251] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_type = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800411] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.images_volume_group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800576] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.inject_key = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800738] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.inject_partition = -2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.800901] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.inject_password = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801065] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.iscsi_iface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801225] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.iser_use_multipath = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801390] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801553] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801716] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_downtime = 500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.801879] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802039] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802198] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_inbound_addr = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802388] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802555] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802720] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_scheme = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.802898] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_timeout_action = abort {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803064] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_tunnelled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803220] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803379] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.live_migration_with_native_tls = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803536] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.max_queues = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803698] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
libvirt.mem_stats_period_seconds = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.803927] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.804103] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.nfs_mount_options = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.804770] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.804962] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.805137] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.805325] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.805511] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.805680] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_pcie_ports = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.805848] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806016] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.pmem_namespaces = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806177] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.quobyte_client_cfg = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806477] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806654] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806820] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 
None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.806988] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807148] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rbd_secret_uuid = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807306] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rbd_user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807469] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807639] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807801] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rescue_image_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.807991] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rescue_kernel_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.808175] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rescue_ramdisk_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.808383] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.808543] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.rx_queue_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.808716] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.smbfs_mount_options = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809028] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809208] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.snapshot_compression = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809371] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
libvirt.snapshot_image_format = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809589] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809757] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.sparse_logical_volumes = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.809921] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.swtpm_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810091] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.swtpm_group = tss {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810256] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.swtpm_user = tss {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810425] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.sysinfo_serial = unique {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810581] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.tb_cache_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810737] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.tx_queue_size = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.810900] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.uid_maps = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811065] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.use_virtio_for_bridges = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811246] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.virt_type = kvm {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811442] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.volume_clear = zero {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811611] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.volume_clear_size = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811779] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.volume_use_multipath = False {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.811940] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_cache_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.812128] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.812299] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.812465] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.812632] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.812908] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813088] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.vzstorage_mount_user = stack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813254] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813427] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813598] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813761] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.813923] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814084] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814243] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.connect_retries = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814429] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814604] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.default_floating_pool = public {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814766] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.814929] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.extension_sync_interval = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815092] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.http_retries = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815276] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815441] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815602] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815798] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.815975] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816160] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.ovs_bridge = br-int {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816328] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.physnets = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816496] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.region_name = RegionOne {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816655] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816822] 
env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.service_metadata_proxy = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.816980] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.817145] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.service_type = network {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.817322] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.817500] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.817661] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.817838] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818028] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818192] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] neutron.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818367] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] notifications.bdms_in_notifications = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818543] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] notifications.default_level = INFO {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818717] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] notifications.notification_format = unversioned {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.818884] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] notifications.notify_on_state_change = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.819061] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.819237] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] pci.alias = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.819405] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] pci.device_spec = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.819616] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] pci.report_in_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.819737] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.819908] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820090] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820287] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820452] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820614] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820773] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.820932] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821092] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.default_domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821249] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.default_domain_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821405] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821561] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 
None None] placement.domain_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821717] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.821879] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822037] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822192] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822347] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822513] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822671] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.project_domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.822836] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.project_domain_name = Default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823001] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.project_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823185] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.project_name = service {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823371] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.region_name = RegionOne {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823537] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823695] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.823862] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.service_type = placement {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824041] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824207] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824365] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824525] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.system_scope = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824681] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824838] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.trust_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.824997] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.user_domain_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.825163] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.user_domain_name = Default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.825323] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.user_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.825495] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.username = nova {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.825675] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.825834] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] placement.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.826011] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.cores = 20 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.826177] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.count_usage_from_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.826373] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.826548] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.injected_file_content_bytes = 10240 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.826716] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.injected_file_path_length = 255 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.826883] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.injected_files = 5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827053] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.instances = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827214] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.key_pairs = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827378] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.metadata_items = 128 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827542] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.ram = 51200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827705] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.recheck_quota = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.827905] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.server_group_members = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828102] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] quota.server_groups = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828281] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828448] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828612] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.image_metadata_prefilter = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828774] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.828961] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.max_attempts = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.829138] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.max_placement_results = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.829332] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.829516] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.829736] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.829858] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] scheduler.workers = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830032] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830201] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830382] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830550] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830715] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.830879] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831047] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831234] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831401] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.host_subset_size = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831561] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831721] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.831884] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832063] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.isolated_hosts = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832263] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.isolated_images = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832430] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832597] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832759] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.832921] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.pci_in_placement = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833085] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833243] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833402] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833560] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833717] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.833875] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834035] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.track_instance_changes = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834210] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834378] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metrics.required = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834540] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metrics.weight_multiplier = 1.0 
{{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834699] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.834864] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] metrics.weight_setting = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.835196] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.835393] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.835576] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.port_range = 10000:20000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.835750] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.835920] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836106] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] serial_console.serialproxy_port = 6083 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836280] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836451] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.auth_type = password {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836611] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836767] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.836929] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837090] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.insecure = False {{(pid=62109) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837247] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837414] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.send_service_user_token = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837576] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837731] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] service_user.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.837929] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.agent_enabled = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.838103] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.838438] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.838678] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.838818] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.html5proxy_port = 6082 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.838982] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.image_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839138] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.jpeg_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839294] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.playback_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839464] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.server_listen = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839633] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839851] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.streaming_mode = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.839953] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] spice.zlib_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840130] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] upgrade_levels.baseapi = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840301] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] upgrade_levels.compute = auto {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840461] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] upgrade_levels.conductor = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840616] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] upgrade_levels.scheduler = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840785] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.840949] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841110] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841286] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841465] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841628] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841786] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.841948] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.842107] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vendordata_dynamic_auth.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.842278] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.api_retry_count = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.842437] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.ca_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.842698] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.842874] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.cluster_name = testcl1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843054] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.connection_pool_size = 10 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843215] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.console_delay_seconds = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843384] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.datastore_regex = ^datastore.* {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843595] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843768] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.host_password = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.843936] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.host_port = 443 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844125] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.host_username = administrator@vsphere.local {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844321] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.insecure = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844496] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.integration_bridge = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844663] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.maximum_objects = 100 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844824] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.pbm_default_policy = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.844990] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.pbm_enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845149] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.pbm_wsdl_location = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845317] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845477] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.serial_port_proxy_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845631] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.serial_port_service_uri = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845800] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.task_poll_interval = 0.5 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.845971] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.use_linked_clone = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.846139] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.vnc_keymap = en-us {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.846304] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.vnc_port = 5900 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.846467] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vmware.vnc_port_total = 10000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.846654] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.auth_schemes = ['none'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.846849] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.847156] env[62109]: 
DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.847346] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.847522] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.novncproxy_port = 6080 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.847701] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.server_listen = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.847900] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848087] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.vencrypt_ca_certs = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848251] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.vencrypt_client_cert = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848409] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vnc.vencrypt_client_key = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848583] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848778] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_deep_image_inspection = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.848969] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.849159] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.849325] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.849489] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.disable_rootwrap = False {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.849653] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.enable_numa_live_migration = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.849838] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850028] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850194] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850358] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.libvirt_disable_apic = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850520] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850684] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.850845] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851008] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851169] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851329] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851489] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851650] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
494.851811] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.851978] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.852176] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.852347] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.client_socket_timeout = 900 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.852514] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.default_pool_size = 1000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.852680] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.keep_alive = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.852855] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.max_header_line = 16384 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853039] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853203] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.ssl_ca_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853363] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.ssl_cert_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853522] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.ssl_key_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853685] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.tcp_keepidle = 600 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.853857] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854021] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] zvm.ca_file = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854178] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] zvm.cloud_connector_url = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854468] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854642] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] zvm.reachable_timeout = 300 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854823] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.enforce_new_defaults = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.854998] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.enforce_scope = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.855173] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.policy_default_rule = default {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.855351] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.855523] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.policy_file = policy.yaml {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.855695] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.855861] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856079] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856249] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856416] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856585] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856760] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.856946] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.connection_string = messaging:// {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857124] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.enabled = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857295] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.es_doc_type = notification {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857460] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.es_scroll_size = 10000 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857627] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.es_scroll_time = 2m {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857797] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.filter_error_trace = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.857989] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.hmac_keys = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.858165] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.sentinel_service_name = mymaster {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.858332] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.socket_timeout = 0.1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.858494] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.trace_requests = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.858654] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler.trace_sqlalchemy = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.858877] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler_jaeger.process_tags = {} {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859047] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859222] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] profiler_otlp.service_name_prefix = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859388] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] remote_debug.host = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859549] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] remote_debug.port = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859730] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.859894] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860070] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860231] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860392] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860549] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860708] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.860867] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861028] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861195] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861352] env[62109]: 
DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861518] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861682] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.861851] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862047] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862225] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862389] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862564] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862727] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.862888] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863054] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863216] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863378] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863544] env[62109]: DEBUG oslo_service.service [None 
req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863705] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.863868] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864050] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864219] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864388] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864555] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864729] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.864900] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865092] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865269] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865439] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865602] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865792] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.865955] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_notifications.retry = -1 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866138] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866311] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866482] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.auth_section = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866644] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.auth_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866799] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.cafile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.866954] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.certfile = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867113] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.collect_timing = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867267] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.connect_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867420] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.connect_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867574] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.endpoint_id = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867728] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.endpoint_override = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.867916] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.insecure = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868113] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.keyfile = None {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868276] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.max_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868432] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.min_version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868586] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.region_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868744] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.retriable_status_codes = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.868976] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.service_name = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869093] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.service_type = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869254] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.split_loggers = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869407] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.status_code_retries = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869561] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.status_code_retry_delay = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869714] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.timeout = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.869869] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.valid_interfaces = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870024] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_limit.version = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870213] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_reports.file_event_handler = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870390] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62109) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870547] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] oslo_reports.log_dir = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870715] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.870875] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871035] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871197] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871358] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871514] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871682] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.871839] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872042] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872184] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872346] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872505] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] vif_plug_ovs_privileged.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872674] env[62109]: DEBUG oslo_service.service 
[None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.872854] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873027] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873214] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873402] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873569] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873735] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.873899] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874078] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874246] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.isolate_vif = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874418] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874581] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874750] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.874918] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
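The long run of oslo_service.service DEBUG lines above (and continuing below) is oslo.config dumping every registered option group at service start via log_opt_values(), which the log itself points at (oslo_config/cfg.py:2620). A minimal sketch, assuming a standalone script rather than Nova's startup code, of how a group such as os_vif_ovs is registered and how log_opt_values() emits one "group.option = value" DEBUG line per option; the option names are copied from the log, the defaults and group wiring here are illustrative assumptions.

# Minimal illustration (not Nova's code) of the "group.option = value"
# DEBUG listing produced by oslo.config's log_opt_values().
import logging

from oslo_config import cfg

CONF = cfg.ConfigOpts()

# Option names mirror the log above; defaults here are assumptions.
ovs_opts = [
    cfg.IntOpt('network_device_mtu', default=1500,
               help='MTU applied to OVS-plugged devices.'),
    cfg.IntOpt('ovs_vsctl_timeout', default=120,
               help='Timeout for OVSDB operations.'),
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640',
               help='OVSDB connection string.'),
    cfg.BoolOpt('per_port_bridge', default=False,
                help='Whether each port gets its own bridge.'),
]
CONF.register_opts(ovs_opts, group='os_vif_ovs')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([], project='example')
# This call emits one DEBUG line per registered option, like the block above.
CONF.log_opt_values(LOG, logging.DEBUG)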
[ 494.875080] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_vif_ovs.per_port_bridge = False {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.875245] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] os_brick.lock_path = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.875411] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.capabilities = [21] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.875567] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.875725] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.875887] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876062] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876242] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] privsep_osbrick.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876427] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876584] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.group = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876738] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.helper_command = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.876898] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.877060] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.877214] env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] nova_sys_admin.user = None {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 494.877348] 
env[62109]: DEBUG oslo_service.service [None req-b62ee415-8d10-4066-b56a-47fa2d1a0247 None None] ******************************************************************************** {{(pid=62109) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 494.877825] env[62109]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 494.878690] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 69cd516fd7b44b51ba30924a276d880f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 494.887543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69cd516fd7b44b51ba30924a276d880f [ 495.381572] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Getting list of instances from cluster (obj){ [ 495.381572] env[62109]: value = "domain-c8" [ 495.381572] env[62109]: _type = "ClusterComputeResource" [ 495.381572] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 495.382646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c49ee1a-7429-48be-96bd-fa4e1277258a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.391566] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 495.392144] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 495.392603] env[62109]: INFO nova.virt.node [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Generated node identity 5d099501-5ecf-4ee9-ac08-22024ac3c80e [ 495.392843] env[62109]: INFO nova.virt.node [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Wrote node identity 5d099501-5ecf-4ee9-ac08-22024ac3c80e to /opt/stack/data/n-cpu-1/compute_id [ 495.393258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 071df0a6e95546d1b78d01834e67bcaf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 495.404950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 071df0a6e95546d1b78d01834e67bcaf [ 495.896264] env[62109]: WARNING nova.compute.manager [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Compute nodes ['5d099501-5ecf-4ee9-ac08-22024ac3c80e'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
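The "Generated node identity ..." / "Wrote node identity ... to /opt/stack/data/n-cpu-1/compute_id" lines above record the compute node's stable UUID being created and persisted on first start, which is why the following warning about missing compute-node records is expected here. A small sketch of that bootstrap pattern, assuming a simple read-or-create file scheme; the path is taken from the log, the logic is illustrative and not nova.virt.node's implementation.

# Illustrative sketch of the node-identity bootstrap logged above:
# reuse the UUID stored in compute_id if present, otherwise create one.
import os
import uuid

COMPUTE_ID_FILE = '/opt/stack/data/n-cpu-1/compute_id'  # path from the log

def get_or_create_node_uuid(path=COMPUTE_ID_FILE):
    if os.path.exists(path):
        with open(path) as f:
            return uuid.UUID(f.read().strip())
    node_uuid = uuid.uuid4()
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as f:
        f.write(str(node_uuid))
    return node_uuid

if __name__ == '__main__':
    print(get_or_create_node_uuid())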
[ 495.897002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg a69c3fe0748b4561aea089d4817c7c10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 495.921726] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a69c3fe0748b4561aea089d4817c7c10 [ 496.399366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 24081c169db9436c9c60d675a9a70174 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 496.410921] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24081c169db9436c9c60d675a9a70174 [ 496.901984] env[62109]: INFO nova.compute.manager [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 496.902443] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg ff01161bfc36465e83de18c32f7c5ce3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 496.913322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff01161bfc36465e83de18c32f7c5ce3 [ 497.405537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 0d410599c9ce4681ae67e8af01d531e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 497.416640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d410599c9ce4681ae67e8af01d531e9 [ 497.907691] env[62109]: WARNING nova.compute.manager [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
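The repeated "Expecting reply to msg ... in queue reply_..." / "Received RPC response for msg ..." pairs throughout this section are oslo.messaging's AMQP driver tracing synchronous RPC calls and their per-client reply queue. A hypothetical sketch of issuing such a call with oslo.messaging; the topic, method name, and payload are invented for illustration, and a reachable RabbitMQ transport_url is assumed.

# Hypothetical sketch of the RPC round trip the amqpdriver lines trace:
# a call goes out on a topic, the response returns on the reply_<uuid> queue.
from oslo_config import cfg
import oslo_messaging as messaging

CONF = cfg.CONF  # assumes transport_url points at a reachable broker

def ask_server():
    transport = messaging.get_rpc_transport(CONF)
    target = messaging.Target(topic='example-topic')   # invented topic
    client = messaging.RPCClient(transport, target)
    # call() blocks until the reply arrives, which is the window between
    # "Expecting reply to msg ..." and "Received RPC response for msg ...".
    return client.call({}, 'ping', payload='hello')

if __name__ == '__main__':
    print(ask_server())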
[ 497.908021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 497.908277] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 497.908428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 497.908581] env[62109]: DEBUG nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 497.909450] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b5d153-dffd-44af-9e0e-fe005afb427d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.917717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69ac303-59fb-4454-a7a9-5faa73608eb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.932574] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4697d2-f0e4-4e7d-b5d2-4001d15ccf71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.938724] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aaeb0b-9f65-491d-bcda-b7ca4eb8d309 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.966464] env[62109]: DEBUG nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181710MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 497.966635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 497.966814] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 497.967163] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg c209a50c89d7421fb4471e4a7608d4ff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 497.978170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c209a50c89d7421fb4471e4a7608d4ff [ 498.469527] env[62109]: WARNING nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] No compute node record for cpu-1:5d099501-5ecf-4ee9-ac08-22024ac3c80e: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 5d099501-5ecf-4ee9-ac08-22024ac3c80e could not be found. [ 498.470748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg b0a3ee03410a4bdba3ae71553a81bf4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 498.482354] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0a3ee03410a4bdba3ae71553a81bf4a [ 498.973915] env[62109]: INFO nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 5d099501-5ecf-4ee9-ac08-22024ac3c80e [ 498.974342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 3b1ce78b7bb642f9a99b02e0fc37b4b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 498.985629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b1ce78b7bb642f9a99b02e0fc37b4b8 [ 499.477243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 49f670dcb5ad4683ad84c48bb43b6bba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 499.496923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49f670dcb5ad4683ad84c48bb43b6bba [ 499.980438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 9adea4fd18f549e1b5dc7138f789cef5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 500.002105] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9adea4fd18f549e1b5dc7138f789cef5 [ 500.483288] env[62109]: DEBUG nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 500.483677] env[62109]: DEBUG nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 500.654722] env[62109]: INFO nova.scheduler.client.report [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] [req-bee30523-6175-4edd-a074-9089a554ef83] Created resource provider record via placement API for resource provider with UUID 5d099501-5ecf-4ee9-ac08-22024ac3c80e and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
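The resource-tracker lines above report the hypervisor view (48 usable vCPUs, 196590 MB of RAM with 512 MB reserved) and the creation of the resource provider record in Placement; the inventory payload logged a few entries below pairs those raw totals with the allocation ratios (4.0 for VCPU, 1.0 for memory and disk) and per-class max_unit values. A small sketch, assuming the payload is simply assembled from those numbers, that reproduces the dictionary shape seen in the log; it is illustrative arithmetic, not Nova's resource tracker.

# Sketch (not Nova code) of assembling the Placement inventory payload whose
# contents appear in the log below: raw totals plus reserved and
# allocation_ratio, with max_unit values taken as given from the log.
def build_inventory(vcpus, ram_mb, disk_gb, *, ram_reserved_mb=512,
                    cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                    max_vcpu_unit=16, max_ram_unit=65530, max_disk_unit=124):
    def entry(total, reserved, ratio, max_unit):
        return {'total': total, 'reserved': reserved, 'min_unit': 1,
                'max_unit': max_unit, 'step_size': 1,
                'allocation_ratio': ratio}
    return {
        'VCPU': entry(vcpus, 0, cpu_ratio, max_vcpu_unit),
        'MEMORY_MB': entry(ram_mb, ram_reserved_mb, ram_ratio, max_ram_unit),
        'DISK_GB': entry(disk_gb, 0, disk_ratio, max_disk_unit),
    }

if __name__ == '__main__':
    import pprint
    # Totals taken from the logged inventory: 48 VCPU, 196590 MB, 400 GB.
    pprint.pprint(build_inventory(48, 196590, 400))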
[ 500.671070] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4930cc57-071d-41be-b276-321a92322ef4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.678581] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54aeb512-f2e0-4e11-98ac-003569828d01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.707962] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af52b815-b8d5-431a-a2e4-408e4141cbce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.714963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f342c6-a687-4ac5-b236-9a40cba02891 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.727713] env[62109]: DEBUG nova.compute.provider_tree [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Updating inventory in ProviderTree for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 500.728336] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg 33d1e1f4c2774323b577746edc94d4d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 500.735536] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33d1e1f4c2774323b577746edc94d4d0 [ 501.294036] env[62109]: DEBUG nova.scheduler.client.report [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Updated inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 501.294284] env[62109]: DEBUG nova.compute.provider_tree [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Updating resource provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e generation from 0 to 1 during operation: update_inventory {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 501.294420] env[62109]: DEBUG nova.compute.provider_tree [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Updating inventory in ProviderTree for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 501.392168] env[62109]: DEBUG nova.compute.provider_tree [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Updating resource provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e generation from 1 to 2 during operation: update_traits {{(pid=62109) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 501.394537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Expecting reply to msg f1f788a30a894d129cf87e4e150f89c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 501.412415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1f788a30a894d129cf87e4e150f89c6 [ 501.897044] env[62109]: DEBUG nova.compute.resource_tracker [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 501.897387] env[62109]: DEBUG oslo_concurrency.lockutils [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.930s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 501.897566] env[62109]: DEBUG nova.service [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Creating RPC server for service compute {{(pid=62109) start /opt/stack/nova/nova/service.py:186}} [ 501.908576] env[62109]: INFO oslo.messaging._drivers.impl_rabbit [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Creating fanout queue: compute_fanout_abf0a7b19c214a7584d5e13de36f12f4 [ 501.911593] env[62109]: DEBUG nova.service [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] Join ServiceGroup membership for this service compute {{(pid=62109) start /opt/stack/nova/nova/service.py:203}} [ 501.911750] env[62109]: DEBUG nova.servicegroup.drivers.db [None req-69ddbcd4-e49f-4055-9b00-9c5a16b43565 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62109) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 502.913433] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 502.914909] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 580a559efd25474084487e9ff2cbdf75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 502.924271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 580a559efd25474084487e9ff2cbdf75 [ 503.416624] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Getting list of instances from cluster (obj){ [ 503.416624] env[62109]: value = "domain-c8" [ 503.416624] env[62109]: _type = "ClusterComputeResource" [ 503.416624] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 503.418236] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ec7f3b-08b7-43db-8d82-f91179908914 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.426973] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 503.427371] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 503.427918] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Getting list of instances from cluster (obj){ [ 503.427918] env[62109]: value = "domain-c8" [ 503.427918] env[62109]: _type = "ClusterComputeResource" [ 503.427918] env[62109]: } {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 503.429020] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfeedc5-861a-418a-84ef-c7ecff6160bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.436893] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Got total of 0 instances {{(pid=62109) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 506.914108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2cb1b91bf8e497cb7713b4d76069594 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 506.934415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2cb1b91bf8e497cb7713b4d76069594 [ 528.336120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquiring lock "d27c729c-1991-41bc-b76f-9c519333330a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.336427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "d27c729c-1991-41bc-b76f-9c519333330a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.336792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 21c9e992df1a46a58a206d4a0c47376b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 528.365006] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21c9e992df1a46a58a206d4a0c47376b [ 528.839573] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Starting 
instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 528.841569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg b4b5b5bf21504f5ca17745c8e2f25e33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 528.909043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4b5b5bf21504f5ca17745c8e2f25e33 [ 529.373136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.373418] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.374891] env[62109]: INFO nova.compute.claims [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.376593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 217ee46033dd4bd2abb7135a75a5e847 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 529.443993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 217ee46033dd4bd2abb7135a75a5e847 [ 529.880460] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 326ed761ed774599ac5660690e43ad40 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 529.890561] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 326ed761ed774599ac5660690e43ad40 [ 530.411359] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3950dd0c-627d-415b-b9db-a0022c3661cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.419166] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289e5b33-0dd4-467e-aadd-f4819b3c668b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.449168] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e2541a-39af-4f25-a22b-fa94009497ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.456533] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9bac9e26-a278-47e5-8ccc-2ed2cb800555 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.470773] env[62109]: DEBUG nova.compute.provider_tree [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.471344] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 6d68922e47364106a8b0f0464f29e192 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 530.482939] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d68922e47364106a8b0f0464f29e192 [ 530.973710] env[62109]: DEBUG nova.scheduler.client.report [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.976108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg f9794079b6904659b3fe222a57fed62a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 530.989000] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9794079b6904659b3fe222a57fed62a [ 531.483740] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.484355] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.486018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 49ab66d6789349b5ab1665fc51f66e60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 531.540779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49ab66d6789349b5ab1665fc51f66e60 [ 531.989555] env[62109]: DEBUG nova.compute.utils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 531.990299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 8a517b898088462ba39757a348f7f071 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 531.991170] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 531.991392] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 532.004448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a517b898088462ba39757a348f7f071 [ 532.497368] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.499175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 10de2006349644f6a5418cff4621c762 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 532.551678] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10de2006349644f6a5418cff4621c762 [ 533.005028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 9790f27246584603a82d73acc48dd559 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 533.053189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9790f27246584603a82d73acc48dd559 [ 533.509795] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 534.675773] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 534.676095] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 534.676204] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 534.676384] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 534.676525] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Image pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 534.679551] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 534.679937] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 534.680057] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 534.680454] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 534.680782] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 534.680846] env[62109]: DEBUG nova.virt.hardware [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 534.681718] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8684e0b-732f-4652-8876-09202cbbb5ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.701689] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35085d17-9163-45f3-99f1-dd706cb85ec8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.721853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614ffdc0-f935-4397-b285-b64392570b96 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.803451] env[62109]: DEBUG nova.policy [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5b4439dac21460ea2f4479a3b0bc37c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23c75f6fd4fd411d86839e1ac7c8c961', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 535.412058] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Successfully created port: 89cd610f-a172-46da-825b-a2a0b6b1be90 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.974122] env[62109]: ERROR nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. [ 537.974122] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.974122] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.974122] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.974122] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.974122] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.974122] env[62109]: ERROR nova.compute.manager raise self.value [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.974122] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 537.974122] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.974122] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 537.974793] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.974793] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 537.974793] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. 
[ 537.974793] env[62109]: ERROR nova.compute.manager [ 537.974793] env[62109]: Traceback (most recent call last): [ 537.974793] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 537.974793] env[62109]: listener.cb(fileno) [ 537.974793] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.974793] env[62109]: result = function(*args, **kwargs) [ 537.974793] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.974793] env[62109]: return func(*args, **kwargs) [ 537.974793] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.974793] env[62109]: raise e [ 537.974793] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.974793] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 537.974793] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.974793] env[62109]: created_port_ids = self._update_ports_for_instance( [ 537.974793] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.974793] env[62109]: with excutils.save_and_reraise_exception(): [ 537.974793] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.974793] env[62109]: self.force_reraise() [ 537.974793] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.974793] env[62109]: raise self.value [ 537.974793] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.974793] env[62109]: updated_port = self._update_port( [ 537.974793] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.974793] env[62109]: _ensure_no_port_binding_failure(port) [ 537.974793] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.974793] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 537.975511] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. [ 537.975511] env[62109]: Removing descriptor: 14 [ 537.976378] env[62109]: ERROR nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. 
[ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] Traceback (most recent call last): [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] yield resources [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.driver.spawn(context, instance, image_meta, [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] vm_ref = self.build_virtual_machine(instance, [ 537.976378] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] for vif in network_info: [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self._sync_wrapper(fn, *args, **kwargs) [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.wait() [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self[:] = self._gt.wait() [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self._exit_event.wait() [ 537.976770] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.976770] env[62109]: ERROR 
nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] result = hub.switch() [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self.greenlet.switch() [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] result = function(*args, **kwargs) [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return func(*args, **kwargs) [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise e [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] nwinfo = self.network_api.allocate_for_instance( [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] created_port_ids = self._update_ports_for_instance( [ 537.977112] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] with excutils.save_and_reraise_exception(): [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.force_reraise() [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise self.value [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] updated_port = self._update_port( [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.977558] 
env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] _ensure_no_port_binding_failure(port) [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise exception.PortBindingFailed(port_id=port['id']) [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. [ 537.977558] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] [ 537.977945] env[62109]: INFO nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Terminating instance [ 537.979945] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquiring lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.980214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquired lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.980310] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.980866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 7544d1694cd84fa9811dada505400fd7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 537.990451] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7544d1694cd84fa9811dada505400fd7 [ 538.519553] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.623451] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.623972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 7a314ef8d1eb443cb16df2598e2b5f7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 538.634078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a314ef8d1eb443cb16df2598e2b5f7f [ 539.126074] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Releasing lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.126504] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 539.126685] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 539.127002] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc6723d3-c75e-412f-a025-790f0af22e2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.136223] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b0b0fb-b0d6-4f0d-8cf5-3a8999391b5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.159258] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d27c729c-1991-41bc-b76f-9c519333330a could not be found. 
[ 539.159474] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 539.159862] env[62109]: INFO nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 539.160168] env[62109]: DEBUG oslo.service.loopingcall [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.160372] env[62109]: DEBUG nova.compute.manager [-] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.160460] env[62109]: DEBUG nova.network.neutron [-] [instance: d27c729c-1991-41bc-b76f-9c519333330a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.234955] env[62109]: DEBUG nova.network.neutron [-] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.235588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b7b149e31e1e4359a815e71fbe929f3d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 539.246445] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7b149e31e1e4359a815e71fbe929f3d [ 539.738601] env[62109]: DEBUG nova.network.neutron [-] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.739120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e83042efdf8c4ae08bd3c3747d99b6a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 539.749157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e83042efdf8c4ae08bd3c3747d99b6a9 [ 540.243744] env[62109]: INFO nova.compute.manager [-] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Took 1.08 seconds to deallocate network for instance. 
[ 540.245470] env[62109]: DEBUG nova.compute.claims [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 540.245652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.245870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.248227] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg ba80a50641c14874a638b3af984bd07b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 540.292877] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba80a50641c14874a638b3af984bd07b [ 540.802187] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea8fdbc-5cc0-4b0c-bba0-c15aa2584740 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.807327] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c13d83-fc58-4865-9901-51c04a06ab30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.843273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ffa7d1-b317-4a82-a9df-a333ca109421 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.851994] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9539abe2-8a20-447b-b9c7-2418c27dac0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.870393] env[62109]: DEBUG nova.compute.provider_tree [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.871185] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg ccf925d24801483c9d52c9d7ff6b3333 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 540.893752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccf925d24801483c9d52c9d7ff6b3333 [ 540.972393] env[62109]: 
DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "67c74993-5779-48fb-9a8c-8904a857d22a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.972897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "67c74993-5779-48fb-9a8c-8904a857d22a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.973308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg f7164becf2aa4b53a763b78af8b6d59c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 541.089695] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7164becf2aa4b53a763b78af8b6d59c [ 541.374035] env[62109]: DEBUG nova.scheduler.client.report [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.376347] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 62a0dfdda04940ef8cbc4ff171b12f07 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 541.388223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62a0dfdda04940ef8cbc4ff171b12f07 [ 541.475784] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.477621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 2bcfa3dc08d047df8352bef6aae064aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 541.523149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bcfa3dc08d047df8352bef6aae064aa [ 541.878720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.633s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.879674] env[62109]: ERROR nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] Traceback (most recent call last): [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.driver.spawn(context, instance, image_meta, [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] vm_ref = self.build_virtual_machine(instance, [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.879674] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] for vif in network_info: [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self._sync_wrapper(fn, *args, **kwargs) [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: 
d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.wait() [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self[:] = self._gt.wait() [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self._exit_event.wait() [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] result = hub.switch() [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.880043] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return self.greenlet.switch() [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] result = function(*args, **kwargs) [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] return func(*args, **kwargs) [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise e [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] nwinfo = self.network_api.allocate_for_instance( [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] created_port_ids = self._update_ports_for_instance( [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] with excutils.save_and_reraise_exception(): [ 541.880467] env[62109]: ERROR nova.compute.manager [instance: 
d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] self.force_reraise() [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise self.value [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] updated_port = self._update_port( [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] _ensure_no_port_binding_failure(port) [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] raise exception.PortBindingFailed(port_id=port['id']) [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] nova.exception.PortBindingFailed: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. [ 541.880802] env[62109]: ERROR nova.compute.manager [instance: d27c729c-1991-41bc-b76f-9c519333330a] [ 541.881102] env[62109]: DEBUG nova.compute.utils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 541.884201] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Build of instance d27c729c-1991-41bc-b76f-9c519333330a was re-scheduled: Binding failed for port 89cd610f-a172-46da-825b-a2a0b6b1be90, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 541.884436] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 541.884672] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquiring lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.884812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Acquired lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.884958] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.885370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 8b2f7c9a069448b9ad0e7494fa4c2f64 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 541.892793] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b2f7c9a069448b9ad0e7494fa4c2f64 [ 541.997827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.998098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.999644] env[62109]: INFO nova.compute.claims [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.001284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 5e24498cd4de4fbcadda3ad0b8166cd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 542.074347] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
5e24498cd4de4fbcadda3ad0b8166cd3 [ 542.382526] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "3888f958-a6ea-4159-b2be-1b2b5781f41f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.382526] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "3888f958-a6ea-4159-b2be-1b2b5781f41f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.382526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg b035f54ec0674a6ca77e317e2069271e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 542.397223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b035f54ec0674a6ca77e317e2069271e [ 542.425732] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.504839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 273c047e23054787b7c51a4d7780119a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 542.516933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 273c047e23054787b7c51a4d7780119a [ 542.538099] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.539287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 94cdc95c33344a9697b7582144b1d7e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 542.551181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94cdc95c33344a9697b7582144b1d7e9 [ 542.884519] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.886432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 73c19f0366aa48e492caa4c64e860dbb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 542.956222] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73c19f0366aa48e492caa4c64e860dbb [ 543.041884] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Releasing lock "refresh_cache-d27c729c-1991-41bc-b76f-9c519333330a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.042834] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 543.042834] env[62109]: DEBUG nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 543.042834] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 543.077889] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.078860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 1196d423f83a4c6b9a2926e138022cf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 543.089764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1196d423f83a4c6b9a2926e138022cf7 [ 543.114639] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a632c3-1c21-4d09-b149-1fd903c25a1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.122458] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf0ae75-a30b-4911-9152-2152f5cbd8c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.152663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f066da-e0d0-464c-a69d-c21e581bc071 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.160616] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705187f1-46ac-4a0d-a92b-5e35dab283df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.174042] env[62109]: DEBUG nova.compute.provider_tree [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.174569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 1b3f872284014e34b4fd21ce14e7c136 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 543.182145] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b3f872284014e34b4fd21ce14e7c136 [ 543.407046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.583810] env[62109]: DEBUG nova.network.neutron [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.584377] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 1d2f0a54a7854de292d6b8da4aef144c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 
543.595799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d2f0a54a7854de292d6b8da4aef144c [ 543.677596] env[62109]: DEBUG nova.scheduler.client.report [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.680068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg a83631bb67ce414d8aab88b5fc4d1725 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 543.691621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a83631bb67ce414d8aab88b5fc4d1725 [ 544.087710] env[62109]: INFO nova.compute.manager [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] [instance: d27c729c-1991-41bc-b76f-9c519333330a] Took 1.05 seconds to deallocate network for instance. [ 544.089578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg f5f153281afd45838e43dcc92d251db1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.135338] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5f153281afd45838e43dcc92d251db1 [ 544.188032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.188032] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 544.188032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg c0301467ec014d60a2b300d2930f422f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.188032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.779s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.191247] env[62109]: INFO nova.compute.claims [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.192815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 9ac439c5530a42a59526e1eaaa2ca4fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.239699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0301467ec014d60a2b300d2930f422f [ 544.247337] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ac439c5530a42a59526e1eaaa2ca4fb [ 544.594268] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg 6f63be92f2664ee4aa6dcfe7ff2df7d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.635292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f63be92f2664ee4aa6dcfe7ff2df7d2 [ 544.697404] env[62109]: DEBUG nova.compute.utils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.698189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg d19c891eab7841dabdf008506668beac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.699132] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 544.704384] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.705814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg e680a894878e461dada8a73601a815f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 544.716094] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d19c891eab7841dabdf008506668beac [ 544.718899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e680a894878e461dada8a73601a815f7 [ 544.871843] env[62109]: DEBUG nova.policy [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4ea22bb21004f69a2b27d306493db45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '423f777bec3c474a91970fce3e308097', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.128724] env[62109]: INFO nova.scheduler.client.report [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Deleted allocations for instance d27c729c-1991-41bc-b76f-9c519333330a [ 545.138119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Expecting reply to msg f7a2d24248c74b03abcbea6797d08d2e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 545.169184] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7a2d24248c74b03abcbea6797d08d2e [ 545.205149] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 545.207039] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg b4b8a809634543399426a2866ae85790 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 545.244337] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4b8a809634543399426a2866ae85790 [ 545.267154] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22472b8e-8aa3-4aab-ba94-b0f434f1ad39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.274814] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f68071-e2b9-43a2-bac2-816b72fdd8f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.313245] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35a773f-cbaa-4f94-90ce-06d43341571f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.321546] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6bbb0e-4404-47de-b9ec-a76232cccd5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.336116] env[62109]: DEBUG nova.compute.provider_tree [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.336116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg d2aeb9ff3df74a34a37db54e299ebfd8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 545.348764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2aeb9ff3df74a34a37db54e299ebfd8 [ 545.640427] env[62109]: DEBUG oslo_concurrency.lockutils [None req-58b2eba5-476b-4f74-88b6-9834b8ca2aab tempest-TenantUsagesTestJSON-1981865800 tempest-TenantUsagesTestJSON-1981865800-project-member] Lock "d27c729c-1991-41bc-b76f-9c519333330a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.304s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.712400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg de6a5e20adaa4e848e461d53d8b616d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 545.753191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de6a5e20adaa4e848e461d53d8b616d7 [ 545.840193] env[62109]: DEBUG nova.scheduler.client.report [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 
tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.842644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 0568313151d44360b08c3f16d8c31f60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 545.855137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0568313151d44360b08c3f16d8c31f60 [ 545.867166] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Successfully created port: 9411c776-3d75-4688-8670-d99f84c3a7e9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.220748] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 546.290566] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.290706] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.290860] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.291039] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd 
tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.291200] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.291390] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.291550] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.291702] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.291863] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.292710] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.292973] env[62109]: DEBUG nova.virt.hardware [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.294390] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec90adb-62d6-4bcd-8e68-fef9bade7ac5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.303536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e956ec-1c26-4055-8cfa-fd604457eb7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.345639] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.160s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.346173] env[62109]: DEBUG nova.compute.manager [None 
req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 546.347888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg b4e9807e6c924c40acde277aea1de8bd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 546.454886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4e9807e6c924c40acde277aea1de8bd [ 546.854640] env[62109]: DEBUG nova.compute.utils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.855298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 78c9621ea9444dbbade9a9d52454e95b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 546.856330] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 546.856412] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.890173] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78c9621ea9444dbbade9a9d52454e95b [ 547.131900] env[62109]: DEBUG nova.policy [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f67ad126d9354c9e9cd8ac79b829fb4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0e5d54143934f498a4a7849ec99a645', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.363773] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 547.363773] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 9a21811a5dee4b9eb6bbe249965e741b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 547.442099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a21811a5dee4b9eb6bbe249965e741b [ 547.798846] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "834573d9-496f-4c80-b157-e1b12f799418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.799114] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "834573d9-496f-4c80-b157-e1b12f799418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.799578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg c0cf1a48e779461eb62c4f5ea412d483 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 547.867599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 6cfbbd02ff004f0cae895291ff6ae2e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 547.890929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0cf1a48e779461eb62c4f5ea412d483 [ 547.938165] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cfbbd02ff004f0cae895291ff6ae2e1 [ 548.301886] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 548.303784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg c9c0a0948bce4dcda62f809f82be7e5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 548.370929] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 548.436988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9c0a0948bce4dcda62f809f82be7e5c [ 548.451206] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Successfully created port: e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.547092] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.547349] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.547517] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.547785] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.547991] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.548201] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.548418] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.548573] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.548722] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.548886] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.549038] env[62109]: DEBUG nova.virt.hardware [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.549955] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8888f0-e751-4766-a009-e1d4ab1b14c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.558635] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3689ac37-a390-4c74-894c-28e5c45ede16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.843883] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.844173] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.845729] env[62109]: INFO nova.compute.claims [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.847522] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg ce14443fa5f640808b7d5acd133287cb in 
queue reply_7522b64acfeb4981b1f36928b040d568 [ 548.938169] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce14443fa5f640808b7d5acd133287cb [ 549.109199] env[62109]: ERROR nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. [ 549.109199] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.109199] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.109199] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.109199] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.109199] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.109199] env[62109]: ERROR nova.compute.manager raise self.value [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.109199] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.109199] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.109199] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.109705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.109705] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.109705] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. 
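The traceback above ends in _ensure_no_port_binding_failure(port) raising nova.exception.PortBindingFailed for port 9411c776-3d75-4688-8670-d99f84c3a7e9. A minimal, self-contained sketch of the kind of check involved, assuming the Neutron port dict carries the standard binding:vif_type field; the exception class below is a local stand-in for illustration, not the real nova.exception module:

# Hedged sketch: a simplified stand-in for the port-binding sanity check whose
# failure produces the PortBindingFailed errors logged above.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustrative only)."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")
        self.port_id = port_id

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

def ensure_no_port_binding_failure(port):
    """Reject a port whose binding the Neutron backend could not complete."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    # A successfully bound port passes silently ...
    ensure_no_port_binding_failure({'id': 'ok-port', 'binding:vif_type': 'ovs'})
    # ... while a failed binding raises, mirroring the entries above.
    try:
        ensure_no_port_binding_failure(
            {'id': '9411c776-3d75-4688-8670-d99f84c3a7e9',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)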
[ 549.109705] env[62109]: ERROR nova.compute.manager [ 549.109705] env[62109]: Traceback (most recent call last): [ 549.109705] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.109705] env[62109]: listener.cb(fileno) [ 549.109705] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.109705] env[62109]: result = function(*args, **kwargs) [ 549.109705] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.109705] env[62109]: return func(*args, **kwargs) [ 549.109705] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.109705] env[62109]: raise e [ 549.109705] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.109705] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 549.109705] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.109705] env[62109]: created_port_ids = self._update_ports_for_instance( [ 549.109705] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.109705] env[62109]: with excutils.save_and_reraise_exception(): [ 549.109705] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.109705] env[62109]: self.force_reraise() [ 549.109705] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.109705] env[62109]: raise self.value [ 549.109705] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.109705] env[62109]: updated_port = self._update_port( [ 549.109705] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.109705] env[62109]: _ensure_no_port_binding_failure(port) [ 549.109705] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.109705] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.110523] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. [ 549.110523] env[62109]: Removing descriptor: 14 [ 549.110523] env[62109]: ERROR nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. 
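Both copies of the traceback pass through oslo_utils.excutils.save_and_reraise_exception() (the force_reraise() and "raise self.value" frames) before PortBindingFailed reaches the compute manager. A short sketch of that cleanup-then-reraise pattern, assuming only that the oslo.utils package is installed; _roll_back_ports() and the failing update callback are hypothetical:

# Hedged sketch of the save_and_reraise_exception pattern visible in the
# tracebacks above: run cleanup when an update fails, then re-raise the
# original exception unchanged.
from oslo_utils import excutils

def _roll_back_ports(created_port_ids):
    # Hypothetical cleanup stand-in; in the log this would be Neutron calls.
    print(f"rolling back ports: {created_port_ids}")

def update_ports_or_cleanup(created_port_ids, update_port):
    try:
        for port_id in created_port_ids:
            update_port(port_id)
    except Exception:
        # The context manager stashes the active exception, lets the cleanup
        # body run, and re-raises the saved exception on exit, which is why
        # force_reraise() and "raise self.value" appear in the frames above.
        with excutils.save_and_reraise_exception():
            _roll_back_ports(created_port_ids)

if __name__ == '__main__':
    def failing_update(port_id):
        raise RuntimeError(f"binding failed for {port_id}")

    try:
        update_ports_or_cleanup(['port-a', 'port-b'], failing_update)
    except RuntimeError as exc:
        print(f"re-raised: {exc}")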
[ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Traceback (most recent call last): [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] yield resources [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.driver.spawn(context, instance, image_meta, [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.110523] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] vm_ref = self.build_virtual_machine(instance, [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] for vif in network_info: [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self._sync_wrapper(fn, *args, **kwargs) [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.wait() [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self[:] = self._gt.wait() [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self._exit_event.wait() [ 549.110920] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.111373] env[62109]: ERROR 
nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] result = hub.switch() [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self.greenlet.switch() [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] result = function(*args, **kwargs) [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return func(*args, **kwargs) [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise e [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] nwinfo = self.network_api.allocate_for_instance( [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.111373] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] created_port_ids = self._update_ports_for_instance( [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] with excutils.save_and_reraise_exception(): [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.force_reraise() [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise self.value [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] updated_port = self._update_port( [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.111728] 
env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] _ensure_no_port_binding_failure(port) [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.111728] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise exception.PortBindingFailed(port_id=port['id']) [ 549.112157] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. [ 549.112157] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] [ 549.112157] env[62109]: INFO nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Terminating instance [ 549.112447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.112595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.112769] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.113212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 0982636c4f174ad6aaa714f9361aab18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 549.122267] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0982636c4f174ad6aaa714f9361aab18 [ 549.352134] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg bba10aa0993349188e2911ad6305e497 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 549.371732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bba10aa0993349188e2911ad6305e497 [ 549.634070] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.945521] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea78a1a-774e-4f0e-a0a8-331718bdc56f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.955586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2956c3-339e-4471-a544-63864ca295b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.982214] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c63b8b-5c82-4af0-9412-97698f9df28a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.989490] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0d2267-b94b-455f-ab0e-c80c18ba395d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.002273] env[62109]: DEBUG nova.compute.provider_tree [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.002779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 9f4eec6f87bf41669e6721fbcded245a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.018364] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f4eec6f87bf41669e6721fbcded245a [ 550.059169] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.060417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg d84cb562268440a6813356ee5295e275 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.079206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d84cb562268440a6813356ee5295e275 [ 550.300037] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.300037] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.300037] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal 
instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 550.300037] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 550.300037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 518127ab61bd48fd9043fb0736a1b5d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.331460] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 518127ab61bd48fd9043fb0736a1b5d4 [ 550.365765] env[62109]: ERROR nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. [ 550.365765] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.365765] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.365765] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.365765] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.365765] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.365765] env[62109]: ERROR nova.compute.manager raise self.value [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.365765] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 550.365765] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.365765] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 550.366208] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.366208] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 550.366208] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. 
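The "Expecting reply to msg ... in queue reply_7522b64acfeb4981b1f36928b040d568" and "Received RPC response for msg ..." pairs threaded through this log are the amqpdriver bookkeeping for blocking RPC calls: each call() registers a message id on a per-process reply queue and waits for the matching response. A hedged sketch of the oslo.messaging client side of that pattern; the broker URL, topic, method name, and payload below are placeholders, and running it end to end needs a reachable broker plus a server listening on the topic:

# Hedged sketch of an oslo.messaging RPC client. call() blocks for a reply,
# which is what drives the reply-queue lines seen throughout this log;
# cast() would be fire-and-forget and produce no such entries.
import oslo_messaging
from oslo_config import cfg

def build_rpc_client(broker_url):
    transport = oslo_messaging.get_rpc_transport(cfg.CONF, url=broker_url)
    target = oslo_messaging.Target(topic='example_topic', version='1.0')
    return oslo_messaging.RPCClient(transport, target, timeout=60)

if __name__ == '__main__':
    client = build_rpc_client('rabbit://guest:guest@localhost:5672/')  # placeholder URL
    result = client.call({}, 'ping', payload='hello')  # blocks until the reply arrives
    print(result)

The single reply queue is created once per process and reused for every request, which is why all of the "Expecting reply" entries above name the same reply_7522b64acfeb4981b1f36928b040d568 queue with different message ids.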
[ 550.366208] env[62109]: ERROR nova.compute.manager [ 550.366208] env[62109]: Traceback (most recent call last): [ 550.366208] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 550.366208] env[62109]: listener.cb(fileno) [ 550.366208] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.366208] env[62109]: result = function(*args, **kwargs) [ 550.366208] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.366208] env[62109]: return func(*args, **kwargs) [ 550.366208] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.366208] env[62109]: raise e [ 550.366208] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.366208] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 550.366208] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.366208] env[62109]: created_port_ids = self._update_ports_for_instance( [ 550.366208] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.366208] env[62109]: with excutils.save_and_reraise_exception(): [ 550.366208] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.366208] env[62109]: self.force_reraise() [ 550.366208] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.366208] env[62109]: raise self.value [ 550.366208] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.366208] env[62109]: updated_port = self._update_port( [ 550.366208] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.366208] env[62109]: _ensure_no_port_binding_failure(port) [ 550.366208] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.366208] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 550.366871] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. [ 550.366871] env[62109]: Removing descriptor: 16 [ 550.366871] env[62109]: ERROR nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. 
[ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Traceback (most recent call last): [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] yield resources [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.driver.spawn(context, instance, image_meta, [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.366871] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] vm_ref = self.build_virtual_machine(instance, [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] for vif in network_info: [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self._sync_wrapper(fn, *args, **kwargs) [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.wait() [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self[:] = self._gt.wait() [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self._exit_event.wait() [ 550.367177] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.367485] env[62109]: ERROR 
nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] result = hub.switch() [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self.greenlet.switch() [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] result = function(*args, **kwargs) [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return func(*args, **kwargs) [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise e [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] nwinfo = self.network_api.allocate_for_instance( [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.367485] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] created_port_ids = self._update_ports_for_instance( [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] with excutils.save_and_reraise_exception(): [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.force_reraise() [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise self.value [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] updated_port = self._update_port( [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.367954] 
env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] _ensure_no_port_binding_failure(port) [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.367954] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise exception.PortBindingFailed(port_id=port['id']) [ 550.368287] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. [ 550.368287] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] [ 550.368287] env[62109]: INFO nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Terminating instance [ 550.369888] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.370089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquired lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.370286] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.370729] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg b4119497fe514eb5bf67285bc26e8cf2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.385044] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4119497fe514eb5bf67285bc26e8cf2 [ 550.505466] env[62109]: DEBUG nova.scheduler.client.report [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.507748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg ac20ab1c06be419fa46f1fdc7a4aec5d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.535936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac20ab1c06be419fa46f1fdc7a4aec5d [ 550.562149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.562567] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 550.562748] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 550.563029] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3eacc35a-1b6b-47d9-8076-2672231f4e65 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.572614] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1918818a-8d63-418a-b7a4-f667364490be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.594259] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67c74993-5779-48fb-9a8c-8904a857d22a could not be found. [ 550.594475] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.594649] env[62109]: INFO nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 550.594884] env[62109]: DEBUG oslo.service.loopingcall [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.595090] env[62109]: DEBUG nova.compute.manager [-] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.595176] env[62109]: DEBUG nova.network.neutron [-] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.625382] env[62109]: DEBUG nova.network.neutron [-] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.625925] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 451b01c346054d008fec3878f9869a29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.633246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451b01c346054d008fec3878f9869a29 [ 550.708081] env[62109]: DEBUG nova.compute.manager [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Received event network-changed-9411c776-3d75-4688-8670-d99f84c3a7e9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 550.708277] env[62109]: DEBUG nova.compute.manager [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Refreshing instance network info cache due to event network-changed-9411c776-3d75-4688-8670-d99f84c3a7e9. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 550.708482] env[62109]: DEBUG oslo_concurrency.lockutils [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] Acquiring lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.708617] env[62109]: DEBUG oslo_concurrency.lockutils [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] Acquired lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.708764] env[62109]: DEBUG nova.network.neutron [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Refreshing network info cache for port 9411c776-3d75-4688-8670-d99f84c3a7e9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 550.709220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] Expecting reply to msg de4933c58d0646b49ccf32d2696672cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.717306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de4933c58d0646b49ccf32d2696672cc [ 550.804229] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 550.804405] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 550.804553] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 550.804676] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 550.804864] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805104] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805346] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805470] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805667] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805830] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.805990] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 550.806133] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 550.806476] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 90d2dfe638cb4bf3adc2084f6d92f1de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 550.833620] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90d2dfe638cb4bf3adc2084f6d92f1de [ 550.904989] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.010270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.166s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.010786] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 551.012625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 70318ea755e246dfb5a210b4bdfc86e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.027181] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.027646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg e7aa5353f4fe43f884d2b4f4c36cd2bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.036708] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7aa5353f4fe43f884d2b4f4c36cd2bf [ 551.086139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70318ea755e246dfb5a210b4bdfc86e2 [ 551.128661] env[62109]: DEBUG nova.network.neutron [-] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.129162] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4eb3f21723854141a85b66f120461d9c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.140594] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4eb3f21723854141a85b66f120461d9c [ 551.226505] env[62109]: DEBUG nova.network.neutron [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.312643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.313036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.313036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.313208] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 551.317306] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc022203-eb00-448d-b7c5-c70ce98cb2f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.324999] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f09d368-a4cc-477e-9231-83aef05bd2cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.343156] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790aa9be-476a-4d73-9bf4-b40acebcc21d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.347255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07521eb-6b3d-443d-9e71-eba897880f56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.388925] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181706MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 551.389482] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.389796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.390755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 63ce1295b65d48c190201209d6cecfca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.392469] env[62109]: DEBUG nova.network.neutron [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.393085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] Expecting reply to msg 6c8d36fda7cb4a39bf824a14970c7aa0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.411621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c8d36fda7cb4a39bf824a14970c7aa0 [ 551.442739] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63ce1295b65d48c190201209d6cecfca [ 551.520938] env[62109]: DEBUG nova.compute.utils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.521580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 9dc7c40f989b495c8b752ebc11eb9edb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.522489] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 551.522656] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.530659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Releasing lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.531442] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 551.531669] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 551.532034] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55b3d139-5e0a-4207-8940-48b9a3f31c10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.545996] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2c7a01-a259-4d26-8598-981b1b2ebe83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.561916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dc7c40f989b495c8b752ebc11eb9edb [ 551.570035] env[62109]: DEBUG nova.policy [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23ccfeaefb6445009a1e73e9e8c5d07b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a15f084eff34d2b840889a457bdaeb3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 551.576392] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3888f958-a6ea-4159-b2be-1b2b5781f41f could not be found. [ 551.576392] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 551.576567] env[62109]: INFO nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 551.576820] env[62109]: DEBUG oslo.service.loopingcall [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.577263] env[62109]: DEBUG nova.compute.manager [-] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.577357] env[62109]: DEBUG nova.network.neutron [-] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 551.614650] env[62109]: DEBUG nova.network.neutron [-] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.615150] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8fa7f535dafb4e7bb4ef6e05f1555071 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.623787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fa7f535dafb4e7bb4ef6e05f1555071 [ 551.631446] env[62109]: INFO nova.compute.manager [-] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Took 1.04 seconds to deallocate network for instance. [ 551.636953] env[62109]: DEBUG nova.compute.claims [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 551.639365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.896586] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg a7e8eccf633b44fcada93248f4ae7d48 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 551.897584] env[62109]: DEBUG oslo_concurrency.lockutils [req-8f3b45d4-ba03-4bb9-b82b-d33f438f12ea req-3196c45a-edae-4bb8-98c2-4abd3d646ca0 service nova] Releasing lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.917301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7e8eccf633b44fcada93248f4ae7d48 [ 552.025445] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 552.027302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 756e68fdfb9f437cb06c79e07fca41be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 552.117563] env[62109]: DEBUG nova.network.neutron [-] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.118011] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bcc61de23c854ea1b67ae1a1106034be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 552.127286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcc61de23c854ea1b67ae1a1106034be [ 552.257600] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Successfully created port: 051993b2-9892-4974-a10f-35a4bdf58ec2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.283739] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 756e68fdfb9f437cb06c79e07fca41be [ 552.464863] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 67c74993-5779-48fb-9a8c-8904a857d22a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.464863] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3888f958-a6ea-4159-b2be-1b2b5781f41f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.464863] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 834573d9-496f-4c80-b157-e1b12f799418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 552.464863] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 552.465061] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 552.539526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 98799086c2234c4195ad10f5ef89a094 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 552.549254] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc69540-6959-48ed-955a-ba56b642bcee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.562323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7590f6-dee3-4ce6-a813-badf1f2f13b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.593777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c616c60-2786-4b8e-a8da-108d399fdccb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.602107] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bc6f15-4cd8-4172-9c55-324dad6e258c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.622637] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.622637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f9cfc56950854751b8ae86193f96c3dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 552.622637] env[62109]: INFO nova.compute.manager [-] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Took 1.04 seconds to deallocate network for instance. 
[ 552.625940] env[62109]: DEBUG nova.compute.claims [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 552.626100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.630401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9cfc56950854751b8ae86193f96c3dc [ 552.744513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98799086c2234c4195ad10f5ef89a094 [ 553.059266] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 553.111756] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.111756] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.111756] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.111974] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.111974] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 
tempest-VolumesAdminNegativeTest-459345843-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.111974] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.111974] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.111974] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.112180] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.112180] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.112180] env[62109]: DEBUG nova.virt.hardware [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.112812] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b7edc1-e319-474a-a124-09ebf7591c9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.120674] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.122887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg fcbec4a4d3a64911986d4b00ef169007 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 553.126104] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ee5792-b824-4602-a0d8-baaa9608781e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.165854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcbec4a4d3a64911986d4b00ef169007 [ 553.625914] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 553.626143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.236s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.626409] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.989s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.628639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg ef42dd1be23f4f879fa36d169ba06c0d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 553.755339] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef42dd1be23f4f879fa36d169ba06c0d [ 553.888977] env[62109]: ERROR nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. 
[ 553.888977] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.888977] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.888977] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.888977] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.888977] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.888977] env[62109]: ERROR nova.compute.manager raise self.value [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.888977] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.888977] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.888977] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.889461] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.889461] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.889461] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. 
[ 553.889461] env[62109]: ERROR nova.compute.manager [ 553.889461] env[62109]: Traceback (most recent call last): [ 553.889461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.889461] env[62109]: listener.cb(fileno) [ 553.889461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.889461] env[62109]: result = function(*args, **kwargs) [ 553.889461] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.889461] env[62109]: return func(*args, **kwargs) [ 553.889461] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.889461] env[62109]: raise e [ 553.889461] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.889461] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 553.889461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.889461] env[62109]: created_port_ids = self._update_ports_for_instance( [ 553.889461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.889461] env[62109]: with excutils.save_and_reraise_exception(): [ 553.889461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.889461] env[62109]: self.force_reraise() [ 553.889461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.889461] env[62109]: raise self.value [ 553.889461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.889461] env[62109]: updated_port = self._update_port( [ 553.889461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.889461] env[62109]: _ensure_no_port_binding_failure(port) [ 553.889461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.889461] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.890310] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. [ 553.890310] env[62109]: Removing descriptor: 16 [ 553.890310] env[62109]: ERROR nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. 
[ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] Traceback (most recent call last): [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] yield resources [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.driver.spawn(context, instance, image_meta, [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.890310] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] vm_ref = self.build_virtual_machine(instance, [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] for vif in network_info: [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self._sync_wrapper(fn, *args, **kwargs) [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.wait() [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self[:] = self._gt.wait() [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self._exit_event.wait() [ 553.890696] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.891090] env[62109]: ERROR 
nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] result = hub.switch() [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self.greenlet.switch() [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] result = function(*args, **kwargs) [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return func(*args, **kwargs) [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise e [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] nwinfo = self.network_api.allocate_for_instance( [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.891090] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] created_port_ids = self._update_ports_for_instance( [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] with excutils.save_and_reraise_exception(): [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.force_reraise() [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise self.value [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] updated_port = self._update_port( [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.891483] 
env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] _ensure_no_port_binding_failure(port) [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.891483] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise exception.PortBindingFailed(port_id=port['id']) [ 553.891823] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. [ 553.891823] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] [ 553.891823] env[62109]: INFO nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Terminating instance [ 553.892727] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.892875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquired lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.893031] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.893435] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 712b1041b8ee4c619a34037b804bf88b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 553.900340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 712b1041b8ee4c619a34037b804bf88b [ 554.271212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141506b2-a0a9-4bc8-9d94-16aa53ac530d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.282764] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfcf1dc-f9e1-4000-a3ca-91f8d44e56d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.319282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a3bbef-0449-4ecb-a397-94570294a68e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.329445] env[62109]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a49cf07-4f71-4aca-baef-e6044631a27f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.351306] env[62109]: DEBUG nova.compute.provider_tree [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.351829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 5e7bbfad886c401595d4a7227514585f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 554.365442] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e7bbfad886c401595d4a7227514585f [ 554.431665] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.752628] env[62109]: DEBUG nova.compute.manager [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Received event network-changed-e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 554.752858] env[62109]: DEBUG nova.compute.manager [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Refreshing instance network info cache due to event network-changed-e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 554.753036] env[62109]: DEBUG oslo_concurrency.lockutils [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] Acquiring lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.753173] env[62109]: DEBUG oslo_concurrency.lockutils [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] Acquired lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.753328] env[62109]: DEBUG nova.network.neutron [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Refreshing network info cache for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 554.753740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] Expecting reply to msg c888a40fc38f4ef18c4e3fbdaf6c105e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 554.769645] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c888a40fc38f4ef18c4e3fbdaf6c105e [ 554.795667] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.795667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 7ab02edd9f2b478bb59ce80a1b455120 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 554.810550] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ab02edd9f2b478bb59ce80a1b455120 [ 554.856228] env[62109]: DEBUG nova.scheduler.client.report [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 554.857193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg ed8d155b6b484e52ab8c291fd386f8ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 554.897005] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8d155b6b484e52ab8c291fd386f8ba [ 555.094760] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.094979] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.095429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 1c1c825effc040b0955c2051e6f206d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.143004] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c1c825effc040b0955c2051e6f206d2 [ 555.291016] env[62109]: DEBUG nova.network.neutron [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.298170] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Releasing lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.298581] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 555.298768] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 555.299073] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-507fc0c3-e93f-4a17-ab6e-5a4f0b01e4c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.308249] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adca0883-482c-4b55-b248-0a710440420a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.331192] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 834573d9-496f-4c80-b157-e1b12f799418 could not be found. [ 555.331430] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 555.331617] env[62109]: INFO nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Took 0.03 seconds to destroy the instance on the hypervisor. [ 555.331871] env[62109]: DEBUG oslo.service.loopingcall [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.340354] env[62109]: DEBUG nova.compute.manager [-] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 555.340354] env[62109]: DEBUG nova.network.neutron [-] [instance: 834573d9-496f-4c80-b157-e1b12f799418] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.361771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.735s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.362466] env[62109]: ERROR nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Traceback (most recent call last): [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.driver.spawn(context, instance, image_meta, [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] vm_ref = self.build_virtual_machine(instance, [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] vif_infos = vmwarevif.get_vif_info(self._session, [ 555.362466] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] for vif in network_info: [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self._sync_wrapper(fn, *args, **kwargs) [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 
67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.wait() [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self[:] = self._gt.wait() [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self._exit_event.wait() [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] result = hub.switch() [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 555.362799] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return self.greenlet.switch() [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] result = function(*args, **kwargs) [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] return func(*args, **kwargs) [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise e [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] nwinfo = self.network_api.allocate_for_instance( [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] created_port_ids = self._update_ports_for_instance( [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] with excutils.save_and_reraise_exception(): [ 555.363235] env[62109]: ERROR nova.compute.manager [instance: 
67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] self.force_reraise() [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise self.value [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] updated_port = self._update_port( [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] _ensure_no_port_binding_failure(port) [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] raise exception.PortBindingFailed(port_id=port['id']) [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] nova.exception.PortBindingFailed: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. [ 555.363642] env[62109]: ERROR nova.compute.manager [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] [ 555.363915] env[62109]: DEBUG nova.compute.utils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 555.364716] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Build of instance 67c74993-5779-48fb-9a8c-8904a857d22a was re-scheduled: Binding failed for port 9411c776-3d75-4688-8670-d99f84c3a7e9, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 555.365130] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 555.365350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.365487] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.365638] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.366050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 13acff1f1f85473898632f21ddcec51c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.372235] env[62109]: DEBUG nova.network.neutron [-] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.372785] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b47f9bf0ab024070b684920ff3915c59 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.373494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.747s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.375248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 51517622e2bd48e995336ada7727934a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.377086] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13acff1f1f85473898632f21ddcec51c [ 555.387039] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b47f9bf0ab024070b684920ff3915c59 [ 555.461006] env[62109]: DEBUG nova.network.neutron [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.461006] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] Expecting reply to msg 670d910f949d4d9f84f4ca953e6f2145 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.466884] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 670d910f949d4d9f84f4ca953e6f2145 [ 555.516167] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51517622e2bd48e995336ada7727934a [ 555.599084] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 555.601000] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 7aff3f3b734844799270e8153c873fac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.647225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aff3f3b734844799270e8153c873fac [ 555.883346] env[62109]: DEBUG nova.network.neutron [-] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.883346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a987fc6f956f4645bcf519dc781fd446 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 555.896448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a987fc6f956f4645bcf519dc781fd446 [ 555.914311] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.967240] env[62109]: DEBUG oslo_concurrency.lockutils [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] Releasing lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.967240] env[62109]: DEBUG nova.compute.manager [req-b77f474b-6df0-43c4-8986-07a82a03dc3b req-6ccd165f-7eb6-4785-a127-e64a2203dd4b service nova] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Received event network-vif-deleted-e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 555.987355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc056ee-ca01-407a-af6f-1bdc236c9723 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.995092] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8dc33a-ba01-4996-bc8d-f83b113fd403 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.036571] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d16cf7-2cb4-4c56-8762-778a232889b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.045076] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81cfdb6-d241-45b8-b730-c424c7ad6693 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.060173] env[62109]: DEBUG nova.compute.provider_tree [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.060691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 4249503dd80b4fefbfb218e64fa8a655 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 556.079119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4249503dd80b4fefbfb218e64fa8a655 [ 556.134308] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.331125] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.331694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 641a6570846e4f38bcba76a9dd7d9c29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 556.350387] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 641a6570846e4f38bcba76a9dd7d9c29 [ 556.384592] env[62109]: INFO nova.compute.manager [-] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Took 1.05 seconds to deallocate network for instance. 
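The "Acquiring lock"/"Lock ... acquired"/"... released" DEBUG lines above all point at the inner wrapper in oslo_concurrency/lockutils.py, i.e. the resource tracker's claim and abort methods are serialized on a single in-process lock named "compute_resources". A minimal sketch of that pattern, assuming oslo.concurrency is installed; ToyResourceTracker and its method bodies are illustrative stand-ins, not Nova's actual ResourceTracker code:

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # same lock name as in the log above


class ToyResourceTracker:
    """Illustrative stand-in for nova.compute.resource_tracker.ResourceTracker."""

    def __init__(self):
        self.claims = {}

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # The decorator's inner wrapper emits the "Acquiring lock ... by ..."
        # and "Lock ... released ... :: held N.NNNs" DEBUG lines seen above,
        # so a claim can never interleave with a concurrent abort.
        self.claims[instance_uuid] = (vcpus, memory_mb)

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def abort_instance_claim(self, instance_uuid):
        # Same lock name, hence the "waited 2.747s" entries when an abort
        # queues up behind another tracker operation.
        self.claims.pop(instance_uuid, None)
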
[ 556.390722] env[62109]: DEBUG nova.compute.claims [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 556.390722] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.566903] env[62109]: DEBUG nova.scheduler.client.report [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.566903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 79680de4f3ec44878b829ab291c7cf61 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 556.595119] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.595119] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.595119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 7d81668ef2934e1aaa25e6225398040e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 556.614794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79680de4f3ec44878b829ab291c7cf61 [ 556.616281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d81668ef2934e1aaa25e6225398040e [ 556.836904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] 
Releasing lock "refresh_cache-67c74993-5779-48fb-9a8c-8904a857d22a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.836904] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 556.836904] env[62109]: DEBUG nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 556.836904] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 556.869716] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.869716] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 8c6dbf4009104382833222949833d06f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 556.877256] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c6dbf4009104382833222949833d06f [ 557.074588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.701s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.075242] env[62109]: ERROR nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. 
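The traceback that follows (like the earlier one for instance 67c74993-5779-48fb-9a8c-8904a857d22a) bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure raising PortBindingFailed once Neutron reports the port's binding as failed. A simplified, self-contained sketch of that check, assuming Neutron marks an unbindable port with binding:vif_type == "binding_failed"; the exception class and helper below are illustrative, not Nova's actual code:

VIF_TYPE_BINDING_FAILED = "binding_failed"


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported that it could not bind the port."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])


# Example: a port dict shaped like Neutron's port-update response for the
# failed port logged above.
ensure_no_port_binding_failure(
    {"id": "e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a",
     "binding:vif_type": VIF_TYPE_BINDING_FAILED})  # raises PortBindingFailed
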
[ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Traceback (most recent call last): [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.driver.spawn(context, instance, image_meta, [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] vm_ref = self.build_virtual_machine(instance, [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] vif_infos = vmwarevif.get_vif_info(self._session, [ 557.075242] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] for vif in network_info: [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self._sync_wrapper(fn, *args, **kwargs) [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.wait() [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self[:] = self._gt.wait() [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self._exit_event.wait() [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] result = hub.switch() [ 557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
557.075551] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return self.greenlet.switch() [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] result = function(*args, **kwargs) [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] return func(*args, **kwargs) [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise e [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] nwinfo = self.network_api.allocate_for_instance( [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] created_port_ids = self._update_ports_for_instance( [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] with excutils.save_and_reraise_exception(): [ 557.075877] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] self.force_reraise() [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise self.value [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] updated_port = self._update_port( [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] _ensure_no_port_binding_failure(port) [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] raise exception.PortBindingFailed(port_id=port['id']) [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] nova.exception.PortBindingFailed: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. [ 557.076216] env[62109]: ERROR nova.compute.manager [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] [ 557.076481] env[62109]: DEBUG nova.compute.utils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 557.078862] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.943s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.078862] env[62109]: INFO nova.compute.claims [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.082225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 1c63dcc5a8a646f187829e87f31fe586 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.082225] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Build of instance 3888f958-a6ea-4159-b2be-1b2b5781f41f was re-scheduled: Binding failed for port e90e5d3a-8fe5-43ff-b4d1-6a967cae6b5a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 557.082225] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 557.082225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.082225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquired lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.082565] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 557.082737] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 43937ffefab9491ebaa0319c1b31f630 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.100400] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 557.102191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg d2bad0e70ff84e63a9bbdb8fd6c5629b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.114386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43937ffefab9491ebaa0319c1b31f630 [ 557.274795] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2bad0e70ff84e63a9bbdb8fd6c5629b [ 557.309280] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c63dcc5a8a646f187829e87f31fe586 [ 557.371693] env[62109]: DEBUG nova.network.neutron [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.371984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg d2915e60646647ba848a979cd126ae75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.386762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2915e60646647ba848a979cd126ae75 [ 557.586751] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 2a8164898e724c80aafaa97d583b73fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.602044] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a8164898e724c80aafaa97d583b73fb [ 557.635266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.635266] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.760881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "22179940-4e5b-4879-be19-a9addb0a628c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.761113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "22179940-4e5b-4879-be19-a9addb0a628c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.761571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg f0e89db65b344b69874bbaaafc13b8db in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.763832] env[62109]: DEBUG nova.compute.manager [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Received event network-changed-051993b2-9892-4974-a10f-35a4bdf58ec2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 557.764285] env[62109]: DEBUG nova.compute.manager [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Refreshing instance network info cache due to event network-changed-051993b2-9892-4974-a10f-35a4bdf58ec2. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 557.764550] env[62109]: DEBUG oslo_concurrency.lockutils [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] Acquiring lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.764692] env[62109]: DEBUG oslo_concurrency.lockutils [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] Acquired lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.764845] env[62109]: DEBUG nova.network.neutron [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Refreshing network info cache for port 051993b2-9892-4974-a10f-35a4bdf58ec2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 557.765217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] Expecting reply to msg b441e4214dcd477da553b7cdfbb263b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.767445] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.767847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 55cfa233dde94ca3931675e660abc09b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.771823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "15dc4e5a-da5b-4657-8aec-f501d35d7a58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.772270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "15dc4e5a-da5b-4657-8aec-f501d35d7a58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.772393] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 2b335665271f498c969becf7d915a011 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 557.773237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b441e4214dcd477da553b7cdfbb263b5 [ 557.783744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received 
RPC response for msg 55cfa233dde94ca3931675e660abc09b [ 557.800590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b335665271f498c969becf7d915a011 [ 557.802184] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0e89db65b344b69874bbaaafc13b8db [ 557.877454] env[62109]: INFO nova.compute.manager [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 67c74993-5779-48fb-9a8c-8904a857d22a] Took 1.04 seconds to deallocate network for instance. [ 557.879297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 237afee94ed44c438233f42b04267339 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.087080] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 237afee94ed44c438233f42b04267339 [ 558.217790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b5ad71-f370-429a-895c-727e53bbb7fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.230845] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799e8827-834e-46cc-8cf1-d03c828266af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.262631] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fce8d1-6590-4312-b978-8a4730879ce9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.274097] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.276769] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 40f6951deb4a4939b9f073bec501f51a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.279446] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.284817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 4f61e4b0195a433682c26f904fbb8763 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.287286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9d1727-d935-472b-ad3b-6b7b921ea491 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.295858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Releasing lock "refresh_cache-3888f958-a6ea-4159-b2be-1b2b5781f41f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.296101] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 558.296264] env[62109]: DEBUG nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 558.296436] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 558.311608] env[62109]: DEBUG nova.compute.provider_tree [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.311608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 3b394f1a9ffe4f52ba527119bb3a19b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.340591] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.340591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg bfe3a0969f69432caee629aa3652b6a6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.343150] env[62109]: DEBUG nova.network.neutron [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.346092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b394f1a9ffe4f52ba527119bb3a19b0 [ 558.351214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe3a0969f69432caee629aa3652b6a6 [ 558.383990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 3513812f5485423bb60f31fc60d7f15a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.390318] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f61e4b0195a433682c26f904fbb8763 [ 558.399919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40f6951deb4a4939b9f073bec501f51a [ 558.443207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3513812f5485423bb60f31fc60d7f15a [ 558.501731] env[62109]: DEBUG nova.network.neutron [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.502396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] Expecting reply to msg 7e8c864c43d944bcb7d1720c13a452aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.512707] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e8c864c43d944bcb7d1720c13a452aa [ 558.807188] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.813697] env[62109]: DEBUG nova.scheduler.client.report [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.816590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg fd5c48500e5a492199bc9419920ccd04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.818212] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.830894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd5c48500e5a492199bc9419920ccd04 [ 558.842776] env[62109]: DEBUG nova.network.neutron [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.843400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 27b32867b0e449c3a210fd9038b26f2e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.853209] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b32867b0e449c3a210fd9038b26f2e [ 558.942100] env[62109]: INFO nova.scheduler.client.report [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Deleted allocations for instance 67c74993-5779-48fb-9a8c-8904a857d22a [ 558.948438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg a3368b90b9d8434ebad87148b0a19632 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 558.977011] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3368b90b9d8434ebad87148b0a19632 [ 559.004517] env[62109]: DEBUG oslo_concurrency.lockutils [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] Releasing lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.004760] env[62109]: DEBUG nova.compute.manager [req-10acbd89-017e-4908-8f9e-9d647991e01f req-a34224a1-c798-426c-9053-d81f68ba238a service nova] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Received event network-vif-deleted-051993b2-9892-4974-a10f-35a4bdf58ec2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 559.319591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.320125] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 559.322138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 2d9f41f3e482445b8542c33300b40a7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.322895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.936s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.324756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 27c1f55db8df4222b506a7e77a1dfa85 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.345858] env[62109]: INFO nova.compute.manager [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: 3888f958-a6ea-4159-b2be-1b2b5781f41f] Took 1.05 seconds to deallocate network for instance. 
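The inventory payloads logged above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determine how much the scheduler can place on this node. A small worked example, assuming the standard placement capacity formula capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

# Inventory values copied from the provider entries logged above.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 124},
}


def schedulable_capacity(inventory):
    # capacity = (total - reserved) * allocation_ratio, per resource class
    return {rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
            for rc, inv in inventory.items()}


print(schedulable_capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
# i.e. the host may be oversubscribed up to 192 VCPUs in total, while
# max_unit still limits any one instance to 16 VCPUs / 65530 MB / 124 GB.
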
[ 559.347372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 431629a542f147a0bdcba614e9601d11 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.412964] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d9f41f3e482445b8542c33300b40a7f [ 559.414605] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27c1f55db8df4222b506a7e77a1dfa85 [ 559.424643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 431629a542f147a0bdcba614e9601d11 [ 559.450174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8daba062-bc3d-447a-8796-bdc3dbcb48bd tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "67c74993-5779-48fb-9a8c-8904a857d22a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.477s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.625351] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.625579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.626031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 07b16c0f2126459f8bba577715d77690 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.665548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07b16c0f2126459f8bba577715d77690 [ 559.828377] env[62109]: DEBUG nova.compute.utils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.828998] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 1938b53b1a664fa8aaaed3ae88e9c4f6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.839104] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 559.839304] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 559.853032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 35898af3bf5f4288b4ad55aef111abd5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 559.854318] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1938b53b1a664fa8aaaed3ae88e9c4f6 [ 559.980712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35898af3bf5f4288b4ad55aef111abd5 [ 559.997269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d373a202-a60f-49f3-837a-b60ea16003d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.005964] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb7ed0c-e590-495f-8666-08b5c59757d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.039039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e61384-8927-4b5d-82fd-8b71901a9a3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.046813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd296df7-87cc-472e-acc1-77dfc245dfd1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.060826] env[62109]: DEBUG nova.compute.provider_tree [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.061389] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg b63aac34d9e24c7494d0a995e6d83326 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.074193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b63aac34d9e24c7494d0a995e6d83326 [ 560.127810] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 560.129677] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg efa7d8a65f514db5937c85a92fc50851 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.300710] env[62109]: DEBUG nova.policy [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e43939b905d44b168a3f82f8158de1e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2274ea1508fa4b67af0d4dd8497524b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 560.311438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efa7d8a65f514db5937c85a92fc50851 [ 560.334291] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 560.336377] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 3ef2a93574f847f0b968b65d27cca4fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.451102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef2a93574f847f0b968b65d27cca4fa [ 560.459331] env[62109]: INFO nova.scheduler.client.report [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Deleted allocations for instance 3888f958-a6ea-4159-b2be-1b2b5781f41f [ 560.466931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 49aa2b05a78648c7bf67faf077d32a98 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.536638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49aa2b05a78648c7bf67faf077d32a98 [ 560.565721] env[62109]: DEBUG nova.scheduler.client.report [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.569060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg b898b31bba33401bbb6f9eb4b785fbd7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.643712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b898b31bba33401bbb6f9eb4b785fbd7 [ 560.675226] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.841399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 606e08b2dbed49d49d693f470fa64546 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 560.904706] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 606e08b2dbed49d49d693f470fa64546 [ 560.979214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-12f400c0-228e-4fe6-a20e-8d72e6d4c9e8 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "3888f958-a6ea-4159-b2be-1b2b5781f41f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.597s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.072620] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.750s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.073703] env[62109]: ERROR nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. 
[ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] Traceback (most recent call last): [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.driver.spawn(context, instance, image_meta, [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] vm_ref = self.build_virtual_machine(instance, [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.073703] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] for vif in network_info: [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self._sync_wrapper(fn, *args, **kwargs) [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.wait() [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self[:] = self._gt.wait() [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self._exit_event.wait() [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] result = hub.switch() [ 561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
561.074063] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return self.greenlet.switch() [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] result = function(*args, **kwargs) [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] return func(*args, **kwargs) [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise e [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] nwinfo = self.network_api.allocate_for_instance( [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] created_port_ids = self._update_ports_for_instance( [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] with excutils.save_and_reraise_exception(): [ 561.074541] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] self.force_reraise() [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise self.value [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] updated_port = self._update_port( [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] _ensure_no_port_binding_failure(port) [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] raise exception.PortBindingFailed(port_id=port['id']) [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] nova.exception.PortBindingFailed: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. [ 561.074871] env[62109]: ERROR nova.compute.manager [instance: 834573d9-496f-4c80-b157-e1b12f799418] [ 561.075187] env[62109]: DEBUG nova.compute.utils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 561.075685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.442s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.082001] env[62109]: INFO nova.compute.claims [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.083551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg ed8a5d84d4ac486a943edc444cf22f91 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 561.089137] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Build of instance 834573d9-496f-4c80-b157-e1b12f799418 was re-scheduled: Binding failed for port 051993b2-9892-4974-a10f-35a4bdf58ec2, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 561.089137] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 561.089137] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.089324] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquired lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.089379] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.089817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg ae7195a82e5c49a1b8cd3a40962497c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 561.100515] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae7195a82e5c49a1b8cd3a40962497c9 [ 561.192907] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquiring lock "5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.193156] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.193615] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 63e93fd2abc7408f8a743fe3fc004a3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 561.213551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8a5d84d4ac486a943edc444cf22f91 [ 561.254384] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 63e93fd2abc7408f8a743fe3fc004a3f [ 561.344402] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 561.406066] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.406245] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.406405] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.406586] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.406715] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.406855] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.407054] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.407206] env[62109]: DEBUG nova.virt.hardware [None 
req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.407375] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.407531] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.411713] env[62109]: DEBUG nova.virt.hardware [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.411713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad44ee66-f683-4e1f-8298-6a4caddf5971 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.417046] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf7b0e4-f4e5-47d8-95dd-7d58b1833d81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.592734] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg bb5fdce56a1241e9a047779c6eb99c2e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 561.613582] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb5fdce56a1241e9a047779c6eb99c2e [ 561.655245] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.695330] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 561.697584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 1effeada530c421980301af630645a74 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 561.767055] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Successfully created port: bcd13e48-db65-4575-a819-060b93efb820 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 561.833053] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1effeada530c421980301af630645a74 [ 562.205632] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.206113] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg c089d4d09dee4dbc8dcf93fcf299ef7c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 562.221987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.234783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c089d4d09dee4dbc8dcf93fcf299ef7c [ 562.352481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ae7e64-10dd-4f72-9d12-fb3c105fec9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.359953] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e6cd8d-fa7c-4f32-8b11-fa536829f8c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.390152] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9a540c-5f8a-4fa8-98cf-5cc6a94786a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.397438] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6bc948-590b-49fe-a1e4-fb0c991f902f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.414348] env[62109]: DEBUG nova.compute.provider_tree [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Inventory has not changed in ProviderTree for 
provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.414822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg f7950b9552d349ca93fba9c5b60f7174 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 562.434741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7950b9552d349ca93fba9c5b60f7174 [ 562.710240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Releasing lock "refresh_cache-834573d9-496f-4c80-b157-e1b12f799418" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.710534] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 562.710970] env[62109]: DEBUG nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.710970] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.758528] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.759203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 75e49d3000aa465fa6fbd03965dc7e4d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 562.767418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75e49d3000aa465fa6fbd03965dc7e4d [ 562.920539] env[62109]: DEBUG nova.scheduler.client.report [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.922876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 01787c75881c45439f840676d3445e50 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 562.963360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01787c75881c45439f840676d3445e50 [ 563.262578] env[62109]: DEBUG nova.network.neutron [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.263110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 132466886cae40c9a4f12390b93ef2f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.280019] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 132466886cae40c9a4f12390b93ef2f5 [ 563.426115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.426667] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 563.428432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg c19517883fe84fd4a5f14e646d7433f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.429651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.622s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.434938] env[62109]: INFO nova.compute.claims [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.438563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 32f61e2b24ae49f8a9255176c2c98aa3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.622552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32f61e2b24ae49f8a9255176c2c98aa3 [ 563.625016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c19517883fe84fd4a5f14e646d7433f0 [ 563.766152] env[62109]: INFO nova.compute.manager [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 834573d9-496f-4c80-b157-e1b12f799418] Took 1.06 seconds to deallocate network for instance. [ 563.768073] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg b3563635175f4defbb193a9c27bed91d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.840932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3563635175f4defbb193a9c27bed91d [ 563.942745] env[62109]: DEBUG nova.compute.utils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.943398] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg c5c88e499e784891a040594a06b980e3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.948354] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 563.948354] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 14d7049abf304c6eb74868e7923daffb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 563.955516] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14d7049abf304c6eb74868e7923daffb [ 563.956089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5c88e499e784891a040594a06b980e3 [ 564.272897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 13937b1e8d8342899949639309e519f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 564.352709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13937b1e8d8342899949639309e519f0 [ 564.447990] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 564.449908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 550f236f1a61405aa9ed64f357242553 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 564.562185] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 550f236f1a61405aa9ed64f357242553 [ 564.601823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a2c866-08da-4a8c-9613-57c94d82ea0c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.611052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b16f97-7270-4774-8ca0-b7ebaf30c62e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.657741] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4cbf6a-9936-477c-8f41-ff7ff5d5d1c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.676946] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a8a649-6648-4bbb-833a-203c81767ad1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.709264] env[62109]: DEBUG nova.compute.provider_tree [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.709849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 
tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 45d3869e3a8b4bbaa0a68ffa386f52e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 564.721923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45d3869e3a8b4bbaa0a68ffa386f52e1 [ 564.810261] env[62109]: INFO nova.scheduler.client.report [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Deleted allocations for instance 834573d9-496f-4c80-b157-e1b12f799418 [ 564.826268] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg a93a32874d524c3bb2bfb9d5de581d42 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 564.828945] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquiring lock "2a473614-2051-47ab-a9bc-f87385a264cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.829127] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "2a473614-2051-47ab-a9bc-f87385a264cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.829723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 4d48ef1eea5440708b9c8de029df1bcc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 564.848017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a93a32874d524c3bb2bfb9d5de581d42 [ 564.868731] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d48ef1eea5440708b9c8de029df1bcc [ 564.958156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 44d613cdcddc4edf9da881c1511ac1a3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 565.024132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44d613cdcddc4edf9da881c1511ac1a3 [ 565.214883] env[62109]: DEBUG nova.scheduler.client.report [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 565.217529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6476e972b92e47718b8ff3af16825cd1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 565.250047] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6476e972b92e47718b8ff3af16825cd1 [ 565.332533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bbae6cd6-d8e2-4aa4-b815-9cd0580620e8 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "834573d9-496f-4c80-b157-e1b12f799418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.533s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.332533] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 565.334432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 5c79e3dc4b5f4a77aef0f11da435a410 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 565.396139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c79e3dc4b5f4a77aef0f11da435a410 [ 565.466392] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 565.512682] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.512921] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.513069] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.513243] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.513380] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.513519] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.513719] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.513926] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.514013] env[62109]: DEBUG nova.virt.hardware [None 
req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.514165] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.514323] env[62109]: DEBUG nova.virt.hardware [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.516272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c86459-b2a1-4066-aaa4-5a357d841c8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.525878] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d53f56-62ef-4346-a9c9-5a894c509c9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.543174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 565.556769] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 565.557145] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdd7b560-85fa-4c68-8181-9b4041a5de90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.568615] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Created folder: OpenStack in parent group-v4. [ 565.568820] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating folder: Project (19d0c5c1785f4cfda4bcfd4dd7e1a796). Parent ref: group-v108864. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 565.569075] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e0996c8-d077-4fe2-a77c-f09d4ba4f9b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.579443] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Created folder: Project (19d0c5c1785f4cfda4bcfd4dd7e1a796) in parent group-v108864. [ 565.579671] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating folder: Instances. Parent ref: group-v108865. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 565.579913] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b468ac47-6227-4155-b11a-bbdf7da00306 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.588143] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Created folder: Instances in parent group-v108865. [ 565.588563] env[62109]: DEBUG oslo.service.loopingcall [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.588644] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 565.588812] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8d65252-dba5-4216-bb82-471c57a9727e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.607532] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 565.607532] env[62109]: value = "task-401429" [ 565.607532] env[62109]: _type = "Task" [ 565.607532] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.621990] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401429, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.722614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.722614] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 565.724233] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 27a3087e7ba64b0587cd99aa4182b2d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 565.727191] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.907s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.727191] env[62109]: INFO nova.compute.claims [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.728883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 3dac9d6e502e45ecbbecdc3e0202b808 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 565.806518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27a3087e7ba64b0587cd99aa4182b2d7 [ 565.811060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dac9d6e502e45ecbbecdc3e0202b808 [ 565.860669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.117575] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401429, 'name': CreateVM_Task, 'duration_secs': 0.299275} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.117733] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 566.118753] env[62109]: DEBUG oslo_vmware.service [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670091ae-c8f5-4ee5-837f-57885470b147 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.124657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.124818] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.125495] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 566.125736] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f8a053a-63c5-4897-bf3e-e3a4cc3c8a35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.130522] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 566.130522] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]524c45ea-7858-d342-5a13-00203b81c732" [ 566.130522] env[62109]: _type = "Task" [ 566.130522] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.141691] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]524c45ea-7858-d342-5a13-00203b81c732, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.232276] env[62109]: DEBUG nova.compute.utils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.232965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6341e3ff6e3748dda9256a51e0b31d6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 566.234927] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 8283505c24934221a81fe1e8b428db2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 566.235871] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 566.236050] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 566.246713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6341e3ff6e3748dda9256a51e0b31d6b [ 566.251872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8283505c24934221a81fe1e8b428db2d [ 566.592733] env[62109]: DEBUG nova.policy [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6349e1aff7d945a6a471b1f4e826b23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a866168186462d9d849072a1ff25f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 566.641854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.642222] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 566.642356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.643146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.643645] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 566.643872] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce1449a6-9985-41f4-8d64-460259b8fde2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.661237] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 566.661609] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 566.664913] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e595573b-e722-451c-9890-a0c484c51a35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.672970] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-052d37d9-1e08-4ef5-9860-e4eab3973cf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.677993] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 566.677993] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52608310-1194-c369-2ed7-58922e4091b8" [ 566.677993] env[62109]: _type = "Task" [ 566.677993] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.685652] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52608310-1194-c369-2ed7-58922e4091b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.736496] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 566.738341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg b3e626ad8d87407786cc5d52ab648907 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 566.798345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3e626ad8d87407786cc5d52ab648907 [ 566.807154] env[62109]: ERROR nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. [ 566.807154] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.807154] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.807154] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.807154] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.807154] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.807154] env[62109]: ERROR nova.compute.manager raise self.value [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.807154] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 566.807154] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.807154] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 566.807709] env[62109]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.807709] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 566.807709] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. [ 566.807709] env[62109]: ERROR nova.compute.manager [ 566.807709] env[62109]: Traceback (most recent call last): [ 566.807709] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 566.807709] env[62109]: listener.cb(fileno) [ 566.807709] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.807709] env[62109]: result = function(*args, **kwargs) [ 566.807709] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 566.807709] env[62109]: return func(*args, **kwargs) [ 566.807709] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.807709] env[62109]: raise e [ 566.807709] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.807709] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 566.807709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.807709] env[62109]: created_port_ids = self._update_ports_for_instance( [ 566.807709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.807709] env[62109]: with excutils.save_and_reraise_exception(): [ 566.807709] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.807709] env[62109]: self.force_reraise() [ 566.807709] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.807709] env[62109]: raise self.value [ 566.807709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.807709] env[62109]: updated_port = self._update_port( [ 566.807709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.807709] env[62109]: _ensure_no_port_binding_failure(port) [ 566.807709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.807709] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 566.808529] env[62109]: nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. [ 566.808529] env[62109]: Removing descriptor: 16 [ 566.808529] env[62109]: ERROR nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. 
[ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Traceback (most recent call last): [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] yield resources [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.driver.spawn(context, instance, image_meta, [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.808529] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] vm_ref = self.build_virtual_machine(instance, [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] for vif in network_info: [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self._sync_wrapper(fn, *args, **kwargs) [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.wait() [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self[:] = self._gt.wait() [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self._exit_event.wait() [ 566.808882] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.809300] env[62109]: ERROR 
nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] result = hub.switch() [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self.greenlet.switch() [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] result = function(*args, **kwargs) [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return func(*args, **kwargs) [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise e [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] nwinfo = self.network_api.allocate_for_instance( [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.809300] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] created_port_ids = self._update_ports_for_instance( [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] with excutils.save_and_reraise_exception(): [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.force_reraise() [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise self.value [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] updated_port = self._update_port( [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.809697] 
env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] _ensure_no_port_binding_failure(port) [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.809697] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise exception.PortBindingFailed(port_id=port['id']) [ 566.809991] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. [ 566.809991] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] [ 566.809991] env[62109]: INFO nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Terminating instance [ 566.809991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.809991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquired lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.809991] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.810178] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg f36e27639afd4ed0b3220286a5d7e60b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 566.818154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f36e27639afd4ed0b3220286a5d7e60b [ 566.858159] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0245bb-bd9a-4b7d-bfaa-9110427b524a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.866329] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974e504f-6b0d-4852-aaff-29ea40488198 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.900558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b9bc67-377b-4177-b87e-28c2d6ebf136 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.907774] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d679be-9310-4675-a053-a5c234c72428 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.923635] env[62109]: DEBUG nova.compute.provider_tree [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.924726] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg fcd4c12a510146b5ac711774eb15268c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 566.932438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcd4c12a510146b5ac711774eb15268c [ 567.188198] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 567.188584] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating directory with path [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 567.188694] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b93bd52b-7f52-4255-87d2-9d31dfb07e83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.208348] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Created directory with path [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 567.208525] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Fetch image to [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 567.208707] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Downloading image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk on the data store datastore1 {{(pid=62109) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 567.209487] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ae5d08-16d6-4070-9e17-606bf04a6c0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.216480] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fd5a3e-b8f3-40a7-80f3-b3e10b93d481 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.227356] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59871d4-920a-4ebb-a765-e263ed23840a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.261940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 07e665bc45b14978bcb1e3feb060c872 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 567.263801] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfb45d3-ca06-4aec-acc8-d44177d9c673 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.270492] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-08376c39-649a-4aa7-8089-7bfa9e894c8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.303386] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Downloading image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to the data store datastore1 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 567.324299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e665bc45b14978bcb1e3feb060c872 [ 567.372444] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.383797] env[62109]: DEBUG oslo_vmware.rw_handles [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 567.446345] env[62109]: DEBUG nova.scheduler.client.report [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.449876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 88a201ac5a704ab6837b92cba5ea746a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 567.467535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88a201ac5a704ab6837b92cba5ea746a [ 567.710831] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.711646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg bdb90f8d5c0a4a8b81ad694546bf1147 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 567.722963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdb90f8d5c0a4a8b81ad694546bf1147 [ 567.769047] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 567.806208] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.806208] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.806208] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.806362] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.806362] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.806362] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.806362] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.806362] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.806517] env[62109]: DEBUG nova.virt.hardware [None 
req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.806517] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.806517] env[62109]: DEBUG nova.virt.hardware [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.807583] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1ddf32-3382-41ce-bcc0-39a5a07aec5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.820434] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c424bad-0bfc-4e37-97d6-7b2db22719bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.862464] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Successfully created port: 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.957554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.232s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.958263] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 567.959966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 248645a01d4f4acd82e0f314731d85d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 567.962625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.288s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.964967] env[62109]: INFO nova.compute.claims [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.967016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 571f6458a8664bd6882c1df72c7b7290 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.030828] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 248645a01d4f4acd82e0f314731d85d0 [ 568.043739] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 571f6458a8664bd6882c1df72c7b7290 [ 568.161272] env[62109]: DEBUG oslo_vmware.rw_handles [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 568.161849] env[62109]: DEBUG oslo_vmware.rw_handles [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 568.218710] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Releasing lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.219277] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.240536] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 568.240536] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a540e833-7775-4f6a-aa40-153fb3432c57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.240536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e26fc5e-cd62-4769-919d-fd23790d1cc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.247825] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Downloaded image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk on the data store datastore1 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 568.247825] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 568.247825] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Copying Virtual Disk [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk to [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 568.247825] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ac23aa4-72df-4a0d-871b-0ed3e4afd97d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.272016] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66 could not be found. 
[ 568.272240] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 568.272362] env[62109]: INFO nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Took 0.05 seconds to destroy the instance on the hypervisor. [ 568.272585] env[62109]: DEBUG oslo.service.loopingcall [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.274801] env[62109]: DEBUG nova.compute.manager [-] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.274801] env[62109]: DEBUG nova.network.neutron [-] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 568.286804] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 568.286804] env[62109]: value = "task-401430" [ 568.286804] env[62109]: _type = "Task" [ 568.286804] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.294715] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.337674] env[62109]: DEBUG nova.network.neutron [-] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.337792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7da862ab3d7048dfb4d55a08c6a7ea56 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.354555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7da862ab3d7048dfb4d55a08c6a7ea56 [ 568.470245] env[62109]: DEBUG nova.compute.utils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 568.470899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg fd8ba7861b694aba8966420a2b99ee67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.472929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 7cd8950884d4435eb8d905ab2e48ef23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.473693] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 568.473845] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 568.491346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd8ba7861b694aba8966420a2b99ee67 [ 568.492392] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cd8950884d4435eb8d905ab2e48ef23 [ 568.561595] env[62109]: DEBUG nova.policy [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e43939b905d44b168a3f82f8158de1e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2274ea1508fa4b67af0d4dd8497524b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 568.801043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquiring lock "32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.801250] 
env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.802199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg b5989142c28e4575a0111b64308289d3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.802767] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.814737] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5989142c28e4575a0111b64308289d3 [ 568.841629] env[62109]: DEBUG nova.network.neutron [-] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.842374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 49fc6a2f850e4cf58065fa2deb37d3a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 568.851486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49fc6a2f850e4cf58065fa2deb37d3a1 [ 568.977492] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 568.979306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 2b5d956f49e74470aad90af269c95650 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.041581] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b5d956f49e74470aad90af269c95650 [ 569.118727] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fa6893-35c2-426f-8aea-78840bad7df6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.126511] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0ed810-088f-4d9d-b895-a88e44b9dc55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.160784] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ad31eb-4c6c-4e77-8cd6-3fcbefc87851 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.168347] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48060ca-4fab-470f-9293-15db009f7396 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.182237] env[62109]: DEBUG nova.compute.provider_tree [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.182783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg e2b341839b8642da9227b9a46068c350 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.191768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2b341839b8642da9227b9a46068c350 [ 569.300152] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686149} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.300152] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Copied Virtual Disk [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk to [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 569.300152] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleting the datastore file [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 569.300152] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7fbc57f-1dee-4636-b0f5-40a7c871442e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.304901] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 569.306450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 0d31d935777844e49dd0d9d259f9b64b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.307459] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 569.307459] env[62109]: value = "task-401431" [ 569.307459] env[62109]: _type = "Task" [ 569.307459] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.323638] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.349213] env[62109]: INFO nova.compute.manager [-] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Took 1.07 seconds to deallocate network for instance. 
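[editorial note] The "compute_resources" lines above and below ('acquired by ... waited Ns' / '"released" by ... held Ns') are emitted by oslo.concurrency's lockutils wrapper, which serializes ResourceTracker.instance_claim and abort_instance_claim. A minimal sketch of the same locking primitive follows; the function name is a placeholder and the real serialization lives in nova.compute.resource_tracker, not here.

    # Illustrative sketch only -- the acquire/release DEBUG lines around
    # "compute_resources" come from this oslo.concurrency wrapper.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the process-local "compute_resources" lock held, which is what
        # serializes concurrent instance claims in the log above.
        print('claiming resources for %s' % instance_uuid)

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass  # critical section
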
[ 569.352584] env[62109]: DEBUG nova.compute.claims [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 569.352788] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.372904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d31d935777844e49dd0d9d259f9b64b [ 569.486139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg f44635f5d82946ec8fa085f781b03794 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.528588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f44635f5d82946ec8fa085f781b03794 [ 569.643909] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Successfully created port: 38c57439-2b4f-48ef-884c-f248749447a4 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.688500] env[62109]: DEBUG nova.scheduler.client.report [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.688500] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 5b224cc6f4e948c8a48eb86b4dfcf6d1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.702170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b224cc6f4e948c8a48eb86b4dfcf6d1 [ 569.823449] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040087} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.823753] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 569.824061] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Moving file from [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd/4800b6ec-9841-4c82-b42e-97cce3beeec5 to [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5. {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 569.824320] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-06e4deb9-456f-41ed-91d1-1b88f6a5256e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.832047] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.832047] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 569.832047] env[62109]: value = "task-401432" [ 569.832047] env[62109]: _type = "Task" [ 569.832047] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.847054] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401432, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.954116] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "c742fcf9-ac27-4a04-81a2-d99741dba794" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.954367] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "c742fcf9-ac27-4a04-81a2-d99741dba794" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.955423] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg f824f3480c39403e9e01c516cfd91ea5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 569.973784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f824f3480c39403e9e01c516cfd91ea5 [ 569.989414] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 570.021404] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.021637] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.021830] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.022031] env[62109]: DEBUG 
nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.022193] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.022336] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.022718] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.022757] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.022889] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.023059] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.023224] env[62109]: DEBUG nova.virt.hardware [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.024508] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5816074-43d3-4585-b50d-bc6352046aa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.034997] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa57e41-a219-4618-ac8f-f4120e9d302a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.199048] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.199048] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 570.199048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 0f30ee20febc4a0d8f360bfe6478e03e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.199048] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.973s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.199048] env[62109]: INFO nova.compute.claims [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.199694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg c6b199bb08b64e43a1abb2fb7d5e3a81 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.234689] env[62109]: DEBUG nova.compute.manager [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Received event network-changed-bcd13e48-db65-4575-a819-060b93efb820 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 570.235497] env[62109]: DEBUG nova.compute.manager [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Refreshing instance network info cache due to event network-changed-bcd13e48-db65-4575-a819-060b93efb820. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 570.235969] env[62109]: DEBUG oslo_concurrency.lockutils [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] Acquiring lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.236279] env[62109]: DEBUG oslo_concurrency.lockutils [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] Acquired lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.236609] env[62109]: DEBUG nova.network.neutron [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Refreshing network info cache for port bcd13e48-db65-4575-a819-060b93efb820 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 570.237280] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] Expecting reply to msg 8add844748674a99b2686d24fa402ce6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.251523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f30ee20febc4a0d8f360bfe6478e03e [ 570.253709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8add844748674a99b2686d24fa402ce6 [ 570.258711] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6b199bb08b64e43a1abb2fb7d5e3a81 [ 570.342467] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401432, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024213} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.342939] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] File moved {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 570.343277] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Cleaning up location [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 570.343552] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleting the datastore file [datastore1] vmware_temp/830f485a-5e13-43bf-bb6e-fd214d4eedbd {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 570.343896] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9a00981-a37a-4ed7-b834-2c689332772a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.353040] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 570.353040] env[62109]: value = "task-401433" [ 570.353040] env[62109]: _type = "Task" [ 570.353040] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.361076] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.457761] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 570.459634] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 80ad8dd607b14276b605b6d36151f5b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.498963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80ad8dd607b14276b605b6d36151f5b5 [ 570.701000] env[62109]: DEBUG nova.compute.utils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.701670] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 0534168d02a4461f91023abeeb39ee52 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.706026] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 7c7604fa0fb7420180af5dca7488fbf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 570.706026] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 570.722709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c7604fa0fb7420180af5dca7488fbf7 [ 570.727360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0534168d02a4461f91023abeeb39ee52 [ 570.863724] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.057143} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.864022] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 570.864726] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60376f07-2c57-43d1-9e1f-e38c51b7379d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.869977] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 570.869977] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]524dc2c1-35e6-ed65-3d54-8e433c86f74e" [ 570.869977] env[62109]: _type = "Task" [ 570.869977] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.879326] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]524dc2c1-35e6-ed65-3d54-8e433c86f74e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.907633] env[62109]: DEBUG nova.network.neutron [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.985234] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.054628] env[62109]: DEBUG nova.network.neutron [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.055192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] Expecting reply to msg 47e34d0cfbb94d05848697747981e547 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 571.065060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47e34d0cfbb94d05848697747981e547 [ 571.206762] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 571.208667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg d5d9f89d369f48dd9ca6874a16fd7728 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 571.279416] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5d9f89d369f48dd9ca6874a16fd7728 [ 571.381904] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]524dc2c1-35e6-ed65-3d54-8e433c86f74e, 'name': SearchDatastore_Task, 'duration_secs': 0.010784} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.382481] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.382883] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f/7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 571.383289] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e110397b-d9cd-4550-9d19-11dcc40ecfb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.390993] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 571.390993] env[62109]: value = "task-401434" [ 571.390993] env[62109]: _type = "Task" [ 571.390993] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.397458] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89223b6e-ec36-4745-b92b-753c960ff406 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.405491] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401434, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.408270] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d0d593-5957-436e-b2f9-d25349fe2237 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.440449] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30641bce-5304-4e61-a29d-626f298cd1fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.448749] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46915e72-c4cc-4d28-ab30-f88f60f432cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.462219] env[62109]: DEBUG nova.compute.provider_tree [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.463040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg a2d681e131fc4255a0302addd69f4bf5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 571.471482] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2d681e131fc4255a0302addd69f4bf5 [ 571.557698] env[62109]: DEBUG oslo_concurrency.lockutils [req-4c5c566f-4841-4efa-8b4e-c589f2009909 req-f9eabd34-d93a-4a03-88b8-b22bc12e1c96 service nova] Releasing lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.726357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 898e771c98864db7be2b84f45d4a8a77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 571.769461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898e771c98864db7be2b84f45d4a8a77 [ 571.904236] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401434, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.966415] env[62109]: DEBUG nova.scheduler.client.report [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.968956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg df6a66e1432140d4b0b2d3984cbac99c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 571.983775] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df6a66e1432140d4b0b2d3984cbac99c [ 572.116548] env[62109]: ERROR nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. [ 572.116548] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.116548] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.116548] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.116548] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.116548] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.116548] env[62109]: ERROR nova.compute.manager raise self.value [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.116548] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 572.116548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.116548] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 572.117151] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 572.117151] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 572.117151] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. [ 572.117151] env[62109]: ERROR nova.compute.manager [ 572.117151] env[62109]: Traceback (most recent call last): [ 572.117151] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 572.117151] env[62109]: listener.cb(fileno) [ 572.117151] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 572.117151] env[62109]: result = function(*args, **kwargs) [ 572.117151] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 572.117151] env[62109]: return func(*args, **kwargs) [ 572.117151] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 572.117151] env[62109]: raise e [ 572.117151] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.117151] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 572.117151] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.117151] env[62109]: created_port_ids = self._update_ports_for_instance( [ 572.117151] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.117151] env[62109]: with excutils.save_and_reraise_exception(): [ 572.117151] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.117151] env[62109]: self.force_reraise() [ 572.117151] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.117151] env[62109]: raise self.value [ 572.117151] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.117151] env[62109]: updated_port = self._update_port( [ 572.117151] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.117151] env[62109]: _ensure_no_port_binding_failure(port) [ 572.117151] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.117151] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 572.118035] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. [ 572.118035] env[62109]: Removing descriptor: 14 [ 572.118035] env[62109]: ERROR nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. 
[ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Traceback (most recent call last): [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] yield resources [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.driver.spawn(context, instance, image_meta, [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 572.118035] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] vm_ref = self.build_virtual_machine(instance, [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] vif_infos = vmwarevif.get_vif_info(self._session, [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] for vif in network_info: [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self._sync_wrapper(fn, *args, **kwargs) [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.wait() [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self[:] = self._gt.wait() [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self._exit_event.wait() [ 572.118410] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 572.118806] env[62109]: ERROR 
nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] result = hub.switch() [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self.greenlet.switch() [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] result = function(*args, **kwargs) [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return func(*args, **kwargs) [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise e [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] nwinfo = self.network_api.allocate_for_instance( [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 572.118806] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] created_port_ids = self._update_ports_for_instance( [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] with excutils.save_and_reraise_exception(): [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.force_reraise() [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise self.value [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] updated_port = self._update_port( [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.119225] 
env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] _ensure_no_port_binding_failure(port) [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.119225] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise exception.PortBindingFailed(port_id=port['id']) [ 572.119572] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. [ 572.119572] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] [ 572.119572] env[62109]: INFO nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Terminating instance [ 572.120670] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.120864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.121050] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.121483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 53f38f34f87f406aa5051ad280989172 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 572.130046] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53f38f34f87f406aa5051ad280989172 [ 572.228511] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 572.249169] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 572.249414] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 572.249567] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.249746] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 572.249888] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.250033] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 572.250235] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 572.250383] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
572.250542] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 572.250696] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 572.250857] env[62109]: DEBUG nova.virt.hardware [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 572.251751] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976772db-c15e-4816-81f3-68cd603f9b58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.260669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4ba7f6-b43b-4c7f-846a-94d3e671f7bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.274566] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.281004] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Creating folder: Project (3eda153b02f14cdb99e65d35ebd0a260). Parent ref: group-v108864. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.281252] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fb404b6-d39a-4637-9633-6c186c6fe54e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.290134] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Created folder: Project (3eda153b02f14cdb99e65d35ebd0a260) in parent group-v108864. [ 572.290314] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Creating folder: Instances. Parent ref: group-v108868. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.290531] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef9f13d3-f755-4a58-bc6a-b8889d9fa7e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.298375] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Created folder: Instances in parent group-v108868. [ 572.298597] env[62109]: DEBUG oslo.service.loopingcall [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.298774] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.298957] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba679030-9a92-46d0-be87-169bebaeb51b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.314603] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.314603] env[62109]: value = "task-401437" [ 572.314603] env[62109]: _type = "Task" [ 572.314603] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.321485] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401437, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.402485] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671215} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.402817] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f/7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 572.403040] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 572.403296] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-611a5aa3-128e-4f09-87db-0512d2e9a938 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.409401] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 572.409401] env[62109]: value = "task-401438" [ 572.409401] env[62109]: _type = "Task" [ 572.409401] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.418054] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401438, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.472405] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.472848] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 572.474686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg c9ef31f0457f4925aa7d047d2c43646d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 572.480154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.615s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.480154] env[62109]: INFO nova.compute.claims [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.480154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg c03f9cd0e3164e97ada7f4d54944944f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 572.538092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9ef31f0457f4925aa7d047d2c43646d [ 572.546887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c03f9cd0e3164e97ada7f4d54944944f [ 572.667280] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.824357] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401437, 'name': CreateVM_Task, 'duration_secs': 0.416515} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.824524] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 572.825448] env[62109]: DEBUG oslo_vmware.service [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f1471c-1fc5-4340-9585-fc05b0ab33b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.831293] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.831489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.831852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.832103] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2068f1bd-525e-4281-96f4-78d9a81766b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.836223] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 572.836223] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]527c39d4-7171-094b-6000-3e8cb4766966" [ 572.836223] env[62109]: _type = "Task" [ 572.836223] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.844120] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]527c39d4-7171-094b-6000-3e8cb4766966, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.923162] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401438, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058701} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.923455] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 572.924380] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab188d6a-2e43-4360-819f-714c923b1d41 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.951699] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f/7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 572.956102] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c580548-c667-4c28-a433-466af1d3d198 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.975653] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 572.975653] env[62109]: value = "task-401439" [ 572.975653] env[62109]: _type = "Task" [ 572.975653] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.981960] env[62109]: DEBUG nova.compute.utils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.982576] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 7827d18369d84edbae87dbb1dcb02da2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 572.989549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg a6833a9b51f04958ad9efeafc3478a03 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 572.990618] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 572.990709] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 572.993228] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.001362] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7827d18369d84edbae87dbb1dcb02da2 [ 573.002278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6833a9b51f04958ad9efeafc3478a03 [ 573.061422] env[62109]: DEBUG nova.policy [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf0ea1832e344b5ebd8131b1aefea18e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8774e76528ec41a58ad05902aeb87360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 573.091266] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.091872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1e1a9f452728425585e09935915583d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.102005] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e1a9f452728425585e09935915583d8 [ 573.347623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.347623] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.347623] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.347623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.348230] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.348230] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47d842dc-ce54-4dd0-9aa8-137e426ecdef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.355445] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.355445] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 573.357625] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0548aefd-db81-4bf8-8b5d-eae0bf6c5334 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.363488] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-701a036d-6eb5-4f3d-b4c9-9939b313e3cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.369355] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 573.369355] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52bc4b79-8279-5d25-bda1-528d82311c4a" [ 573.369355] env[62109]: _type = "Task" [ 573.369355] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.386370] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52bc4b79-8279-5d25-bda1-528d82311c4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.423298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquiring lock "d24eec8f-565a-4a02-834c-267e633ebb12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.423298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "d24eec8f-565a-4a02-834c-267e633ebb12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.423298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg c7d057197c034f41824b9ff0377421ac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.432945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7d057197c034f41824b9ff0377421ac [ 573.486574] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401439, 'name': ReconfigVM_Task, 'duration_secs': 0.259988} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.486574] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f/7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 573.487107] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f83964ae-ed8f-4886-906c-fed9ba4d32b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.493304] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 573.493304] env[62109]: value = "task-401440" [ 573.493304] env[62109]: _type = "Task" [ 573.493304] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.501274] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 573.503366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 5b2a7075dd764b6893ba5104e5994846 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.513471] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401440, 'name': Rename_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.561248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b2a7075dd764b6893ba5104e5994846 [ 573.595327] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.596619] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 573.596906] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 573.598192] env[62109]: ERROR nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. 
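The PortBindingFailed tracebacks in this section all terminate in _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294. A minimal sketch of the check that frame implies follows; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions taken from the Neutron port API, not quoted from this log.

    # Illustrative sketch only, not the Nova source: a port whose binding
    # failed on the Neutron side aborts the build with PortBindingFailed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a failed binding by setting the
        # 'binding:vif_type' attribute to the sentinel 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])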
[ 573.598192] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.598192] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.598192] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.598192] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.598192] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.598192] env[62109]: ERROR nova.compute.manager raise self.value [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.598192] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 573.598192] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.598192] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 573.599105] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.599105] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 573.599105] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. 
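The oslo_utils/excutils.py frames in the traceback above (__exit__ -> force_reraise -> raise self.value) are the save_and_reraise_exception context manager: _update_ports_for_instance uses it to run cleanup and then re-raise the original PortBindingFailed unchanged. A short usage sketch under that reading; bind() and unbind() are hypothetical helpers, not Nova code.

    from oslo_utils import excutils

    def bind(port):
        # Stand-in for the Neutron port update; fails for demonstration.
        raise RuntimeError("binding failed for %s" % port)

    def unbind(port):
        print("rolled back %s" % port)

    def update_ports(ports):
        created = []
        for port in ports:
            try:
                created.append(bind(port))
            except Exception:
                # Re-raises the original exception when the with-block exits
                # (unless ctxt.reraise is set to False inside the block).
                with excutils.save_and_reraise_exception():
                    for p in created:
                        unbind(p)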
[ 573.599105] env[62109]: ERROR nova.compute.manager [ 573.599105] env[62109]: Traceback (most recent call last): [ 573.599105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 573.599105] env[62109]: listener.cb(fileno) [ 573.599105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.599105] env[62109]: result = function(*args, **kwargs) [ 573.599105] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.599105] env[62109]: return func(*args, **kwargs) [ 573.599105] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 573.599105] env[62109]: raise e [ 573.599105] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.599105] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 573.599105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.599105] env[62109]: created_port_ids = self._update_ports_for_instance( [ 573.599105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.599105] env[62109]: with excutils.save_and_reraise_exception(): [ 573.599105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.599105] env[62109]: self.force_reraise() [ 573.599105] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.599105] env[62109]: raise self.value [ 573.599105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.599105] env[62109]: updated_port = self._update_port( [ 573.599105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.599105] env[62109]: _ensure_no_port_binding_failure(port) [ 573.599105] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.599105] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 573.599985] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. [ 573.599985] env[62109]: Removing descriptor: 16 [ 573.600093] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e59c09b-05b0-48d1-9ada-a902ad5f0a04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.603925] env[62109]: ERROR nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. 
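Worth noting from the two tracebacks above: the binding actually fails inside _allocate_network_async, which runs in a separate eventlet greenthread, so the exception only surfaces later, when the VMware driver iterates network_info in get_vif_info and the wrapper's _sync_wrapper()/wait() re-raises it. Below is a toy sketch of that deferral pattern, assuming only stock eventlet; it is not the NetworkInfoAsyncWrapper from nova/network/model.py.

    import eventlet

    class AsyncNetworkInfo:
        # Allocation runs in a greenthread; any exception it raised is
        # re-raised the first time the result is iterated.
        def __init__(self, allocate, *args):
            self._gt = eventlet.spawn(allocate, *args)
            self._result = None

        def __iter__(self):
            if self._result is None:
                # GreenThread.wait() returns the result or re-raises the
                # exception, mirroring _sync_wrapper()/wait() in the traceback.
                self._result = self._gt.wait()
            return iter(self._result)

    def allocate(port_id):
        raise RuntimeError("Binding failed for port %s" % port_id)

    nwinfo = AsyncNetworkInfo(allocate, "38c57439-2b4f-48ef-884c-f248749447a4")
    for vif in nwinfo:   # the failure only appears here, in the spawn path
        pass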
[ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Traceback (most recent call last): [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] yield resources [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.driver.spawn(context, instance, image_meta, [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] vm_ref = self.build_virtual_machine(instance, [ 573.603925] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] for vif in network_info: [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self._sync_wrapper(fn, *args, **kwargs) [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.wait() [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self[:] = self._gt.wait() [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self._exit_event.wait() [ 573.604308] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.604308] env[62109]: ERROR 
nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] result = hub.switch() [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self.greenlet.switch() [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] result = function(*args, **kwargs) [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return func(*args, **kwargs) [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise e [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] nwinfo = self.network_api.allocate_for_instance( [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] created_port_ids = self._update_ports_for_instance( [ 573.604739] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] with excutils.save_and_reraise_exception(): [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.force_reraise() [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise self.value [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] updated_port = self._update_port( [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.605179] 
env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] _ensure_no_port_binding_failure(port) [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise exception.PortBindingFailed(port_id=port['id']) [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. [ 573.605179] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] [ 573.605635] env[62109]: INFO nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Terminating instance [ 573.608083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.608257] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquired lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.608511] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.609020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 17d1d0b0f1944ed1930578908357f45b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.617143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17d1d0b0f1944ed1930578908357f45b [ 573.626433] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f68e50-f07a-43c0-a4cb-b4420614db86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.660041] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 22179940-4e5b-4879-be19-a9addb0a628c could not be found. 
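The Acquiring/Acquired/Releasing lock entries throughout this section (refresh_cache-&lt;uuid&gt;, compute_resources, the datastore image-cache paths) come from oslo.concurrency's named locks, which also log how long each lock was waited for and held. A minimal sketch of that pattern, with lock names taken from the entries above:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Serialized with every other caller using the same lock name.
        print('claiming resources for %s' % instance_uuid)

    def refresh_network_cache(instance_uuid):
        # Context-manager form, one lock per instance cache.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            print('rebuilding network info cache for %s' % instance_uuid)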
[ 573.660738] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 573.660738] env[62109]: INFO nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 573.660738] env[62109]: DEBUG oslo.service.loopingcall [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.661235] env[62109]: DEBUG nova.compute.manager [-] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.661327] env[62109]: DEBUG nova.network.neutron [-] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 573.704961] env[62109]: DEBUG nova.network.neutron [-] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.706624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 720e7faa899346fbbd92262ee852ce76 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.714461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 720e7faa899346fbbd92262ee852ce76 [ 573.750781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1212c434-4db0-451d-aecc-42a057cbcf0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.754458] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Successfully created port: 228e8a13-db93-4bfa-b8bc-ab5292e785b1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.761277] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caa168f-df4e-4663-91b5-ba2fc4b5aec7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.795757] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb33a7a-b304-4529-936d-06fabd632b79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.803828] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4a9eb7-bec7-4cbf-9d18-965ebdf52301 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.824247] env[62109]: DEBUG nova.compute.provider_tree [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.825115] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 32594e82ba17471dbb0bef3fab4ca153 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.834007] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32594e82ba17471dbb0bef3fab4ca153 [ 573.882352] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Preparing fetch location {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 573.882626] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Creating directory with path [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.883023] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ab484f9-8d25-4432-9c9a-8a204e04e8fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.903581] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Created directory with path [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.903778] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Fetch image to [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 573.903941] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Downloading image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk on the data store datastore2 {{(pid=62109) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 573.904827] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-6f40972b-e602-4868-b4e8-12d162c912f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.912820] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dfbb66-8a48-48ab-a7de-cdb04eddedd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.924627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ec9a0c-d024-4f25-8b19-043b0610f39b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.928626] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 573.930486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 8034cb242c85440fa196ec69eb1074ee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 573.961016] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb77ee5b-01b3-4018-bfa9-aa469b8b3227 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.967258] env[62109]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b222544-c79a-40d3-9dc7-014be4d7caee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.004521] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401440, 'name': Rename_Task, 'duration_secs': 0.138007} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.004786] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 574.005029] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30036c52-198c-4692-89fe-2f821457c035 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.013441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 18131c4ffc504908b5bbc05c14b40a56 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.014491] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 574.014491] env[62109]: value = "task-401441" [ 574.014491] env[62109]: _type = "Task" [ 574.014491] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.014977] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8034cb242c85440fa196ec69eb1074ee [ 574.029171] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.064650] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Downloading image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to the data store datastore2 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 574.107089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18131c4ffc504908b5bbc05c14b40a56 [ 574.157273] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.164102] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62109) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 574.228066] env[62109]: DEBUG nova.network.neutron [-] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.228539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1549dca2037e4f4b81d5d3f13a5e6ea4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.237049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquiring lock "39901fc8-8fc8-4812-936e-0ded3811d61c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.237284] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "39901fc8-8fc8-4812-936e-0ded3811d61c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.243413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1549dca2037e4f4b81d5d3f13a5e6ea4 [ 574.318993] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.319715] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 18d66d3907aa423480cecd8e21bd3be2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.327521] env[62109]: DEBUG nova.scheduler.client.report [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 574.330433] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg b512bce6fdb444859deaa29e84d6bc00 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.331480] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18d66d3907aa423480cecd8e21bd3be2 [ 574.350449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b512bce6fdb444859deaa29e84d6bc00 [ 574.455141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.522404] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 574.531178] env[62109]: DEBUG oslo_vmware.api [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401441, 'name': PowerOnVM_Task, 'duration_secs': 0.434326} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.531352] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 574.531545] env[62109]: INFO nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Took 9.06 seconds to spawn the instance on the hypervisor. 
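The entries above trace the tail of a VMware-backed spawn: a Rename_Task completes, VirtualMachine.PowerOnVM_Task is invoked, and task-401441 is polled until it reports success and the instance is marked spawned. Below is a minimal sketch of that invoke-then-poll pattern using the public oslo.vmware API directly; it is not Nova's code, the vCenter host, credentials, retry/poll settings and the instance UUID are placeholders, and the constructor arguments are assumed from memory rather than taken from this deployment.

    # Sketch only: invoke a vSphere task and block until it finishes, the way
    # the PowerOnVM_Task / "progress is 0%...100%" entries above are produced.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'username', 'password',          # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID (SearchIndex.FindAllByUuid, which also
    # appears elsewhere in this log), then start the power-on task.
    search_index = session.vim.service_content.searchIndex
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid', search_index,
        uuid='7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f',         # placeholder UUID
        vmSearch=True, instanceUuid=True)

    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task)   # polls task state, raising if the task fails

Because wait_for_task only returns on success, callers such as the power-on path logged above proceed to the "Powered on the VM" step only after the task has completed.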
[ 574.531890] env[62109]: DEBUG nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 574.532612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a88733-a22f-40c1-aff5-a330cbbd75de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.540133] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 2ff0a69c62f44fe996e04325feec3069 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.566471] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.566723] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.566914] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.567142] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 574.567496] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.567770] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.567996] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.568192] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 574.568356] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.568509] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.568672] env[62109]: DEBUG nova.virt.hardware [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.579721] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96949cbd-1a6e-4065-84bf-7787eab32efd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.601349] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ff0a69c62f44fe996e04325feec3069 [ 574.602813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9708165d-2631-4b15-b8c3-bef67ee53476 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.669187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "d91c5dae-4ece-4718-a16b-534729f7ba49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.669558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "d91c5dae-4ece-4718-a16b-534729f7ba49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.737668] env[62109]: INFO nova.compute.manager [-] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Took 1.08 seconds to deallocate network for instance. [ 574.740081] env[62109]: DEBUG nova.compute.claims [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 574.740491] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.822005] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Releasing lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.822727] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 574.823093] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 574.823502] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21c1f31d-a210-42e4-8a15-c719326f64da {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.834713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8894ea24-9bd9-4e29-96fb-3d1f138e2f89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.853052] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Completed reading data from the image iterator. {{(pid=62109) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 574.853614] env[62109]: DEBUG oslo_vmware.rw_handles [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62109) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 574.854916] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.855896] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 574.858972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 63bf1b0b8e9446e0bba6d1475014e0df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.860494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.508s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.862402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 904a20cc4a5d45c68a705b9280925865 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.880439] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 15dc4e5a-da5b-4657-8aec-f501d35d7a58 could not be found. [ 574.880716] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 574.880911] env[62109]: INFO nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Took 0.06 seconds to destroy the instance on the hypervisor. [ 574.881528] env[62109]: DEBUG oslo.service.loopingcall [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.881796] env[62109]: DEBUG nova.compute.manager [-] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 574.881928] env[62109]: DEBUG nova.network.neutron [-] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 574.907125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 904a20cc4a5d45c68a705b9280925865 [ 574.912247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63bf1b0b8e9446e0bba6d1475014e0df [ 574.927202] env[62109]: DEBUG nova.network.neutron [-] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.929537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 73d4c3a30d19407f9fcf28e48c8d7b8d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 574.942756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73d4c3a30d19407f9fcf28e48c8d7b8d [ 575.005025] env[62109]: DEBUG nova.virt.vmwareapi.images [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Downloaded image file data 4800b6ec-9841-4c82-b42e-97cce3beeec5 to vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk on the data store datastore2 {{(pid=62109) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 575.006761] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Caching image {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 575.007014] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Copying Virtual Disk [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk to [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 575.007634] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4314defa-b08c-46f8-a0d5-a9538ac7e8a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.015110] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 575.015110] env[62109]: value = "task-401442" [ 575.015110] env[62109]: _type = "Task" [ 575.015110] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.023756] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.054541] env[62109]: INFO nova.compute.manager [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Took 17.44 seconds to build instance. [ 575.055118] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 9f060b7d863e4a839b089560092c8b67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.075140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquiring lock "2fb7c1e4-d756-4528-914e-b924c5a3be38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.075402] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "2fb7c1e4-d756-4528-914e-b924c5a3be38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.085823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f060b7d863e4a839b089560092c8b67 [ 575.175615] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "f1239cdd-d1b3-4494-8204-0fe150737579" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.175841] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "f1239cdd-d1b3-4494-8204-0fe150737579" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.192547] env[62109]: DEBUG nova.compute.manager [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Received event network-changed-10ebfd79-b01d-46c0-8fcf-e500ebe6ca84 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 575.192746] env[62109]: DEBUG nova.compute.manager [req-d2550854-138e-4194-93e0-08c0c79b737d 
req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Refreshing instance network info cache due to event network-changed-10ebfd79-b01d-46c0-8fcf-e500ebe6ca84. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 575.192956] env[62109]: DEBUG oslo_concurrency.lockutils [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] Acquiring lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.193132] env[62109]: DEBUG oslo_concurrency.lockutils [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] Acquired lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.193292] env[62109]: DEBUG nova.network.neutron [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Refreshing network info cache for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 575.193704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] Expecting reply to msg 3b0bd1da956541ed84dd30867ca114de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.203977] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b0bd1da956541ed84dd30867ca114de [ 575.382293] env[62109]: DEBUG nova.compute.utils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.382951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg c9caa2f9310d452a9847f604f0f61c32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.383861] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 575.384033] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 575.387952] env[62109]: ERROR nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. 
[ 575.387952] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 575.387952] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.387952] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.387952] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.387952] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.387952] env[62109]: ERROR nova.compute.manager raise self.value [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.387952] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 575.387952] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.387952] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 575.388469] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.388469] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 575.388469] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. 
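The traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure raising PortBindingFailed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1 (a second dump of the same failure follows below). The following is a self-contained sketch of that check with a simplified stand-in exception, not Nova's actual classes: after Neutron returns the updated port, the binding result is inspected and the build is failed when the vif_type comes back as 'binding_failed'.

    # Simplified stand-in for the check at the bottom of the traceback above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port_id)
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported a failed binding for this port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])


    # The port from the failure above would trip the check like this:
    port = {'id': '228e8a13-db93-4bfa-b8bc-ab5292e785b1',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)   # prints the same message seen in the ERROR lines here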
[ 575.388469] env[62109]: ERROR nova.compute.manager [ 575.388469] env[62109]: Traceback (most recent call last): [ 575.388469] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 575.388469] env[62109]: listener.cb(fileno) [ 575.388469] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.388469] env[62109]: result = function(*args, **kwargs) [ 575.388469] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.388469] env[62109]: return func(*args, **kwargs) [ 575.388469] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 575.388469] env[62109]: raise e [ 575.388469] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 575.388469] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 575.388469] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.388469] env[62109]: created_port_ids = self._update_ports_for_instance( [ 575.388469] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.388469] env[62109]: with excutils.save_and_reraise_exception(): [ 575.388469] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.388469] env[62109]: self.force_reraise() [ 575.388469] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.388469] env[62109]: raise self.value [ 575.388469] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.388469] env[62109]: updated_port = self._update_port( [ 575.388469] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.388469] env[62109]: _ensure_no_port_binding_failure(port) [ 575.389200] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.389200] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 575.389200] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. [ 575.389200] env[62109]: Removing descriptor: 19 [ 575.389641] env[62109]: ERROR nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. 
[ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Traceback (most recent call last): [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] yield resources [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.driver.spawn(context, instance, image_meta, [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] vm_ref = self.build_virtual_machine(instance, [ 575.389641] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] for vif in network_info: [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self._sync_wrapper(fn, *args, **kwargs) [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.wait() [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self[:] = self._gt.wait() [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self._exit_event.wait() [ 575.390037] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.390037] env[62109]: ERROR 
nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] result = hub.switch() [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self.greenlet.switch() [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] result = function(*args, **kwargs) [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return func(*args, **kwargs) [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise e [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] nwinfo = self.network_api.allocate_for_instance( [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] created_port_ids = self._update_ports_for_instance( [ 575.390386] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] with excutils.save_and_reraise_exception(): [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.force_reraise() [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise self.value [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] updated_port = self._update_port( [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.390737] 
env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] _ensure_no_port_binding_failure(port) [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise exception.PortBindingFailed(port_id=port['id']) [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. [ 575.390737] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] [ 575.391096] env[62109]: INFO nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Terminating instance [ 575.392778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquiring lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.392991] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquired lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.393205] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.393684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg c47a271ee3e9464b968c62c35ef9cb96 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.395438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9caa2f9310d452a9847f604f0f61c32 [ 575.402257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c47a271ee3e9464b968c62c35ef9cb96 [ 575.431582] env[62109]: DEBUG nova.network.neutron [-] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.432267] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8c61b7f9b86f4f758abf29360c14925e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.441870] env[62109]: DEBUG nova.policy [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '809b3bdb71dc4e62a733190a8ad2e71b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b55538da1d3740f09e3c5ba6022ec17e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 575.448009] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c61b7f9b86f4f758abf29360c14925e [ 575.526235] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401442, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.557367] env[62109]: DEBUG oslo_concurrency.lockutils [None req-92b2ad58-7f69-4130-b419-4f54899a1ddc tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.963s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.557991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg a8c0a8e7b4ce49ae82505ebffe58e26c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.574914] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8c0a8e7b4ce49ae82505ebffe58e26c [ 575.621165] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1577bc5b-a3e7-4913-8cec-7c4d5344c7ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.625146] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a4d3dd-bc5a-4600-8634-abd4d4d9a312 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.666186] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a23b7e-432e-4d8c-b412-c14966b46cac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.674737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360e4f09-fd66-401f-ad03-1e5f57eef9db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.693076] env[62109]: DEBUG nova.compute.provider_tree [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.693076] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 
tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 090d7a5fe1a14dc7924ced7a2037ccda in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.699839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 090d7a5fe1a14dc7924ced7a2037ccda [ 575.726811] env[62109]: DEBUG nova.network.neutron [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.866951] env[62109]: DEBUG nova.network.neutron [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.866951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] Expecting reply to msg 24ef375f251343738aa5af4a182d0338 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.874934] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24ef375f251343738aa5af4a182d0338 [ 575.894345] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 575.894345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg ed8d5e46b184409ba547c73341e2fbac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 575.918371] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Successfully created port: ebefe3f7-1348-4203-ab33-a289a4fa30de {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 575.934853] env[62109]: INFO nova.compute.manager [-] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Took 1.05 seconds to deallocate network for instance. 
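The scheduler report entries earlier in this log repeatedly publish the same inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 MB reserved, and 400 GB of disk. Assuming placement's usual capacity formula, capacity = int((total - reserved) * allocation_ratio) — stated here as an assumption about placement internals, not quoted from this log — that inventory works out as in the short sketch below.

    # Hedged sketch: effective capacity implied by the inventory reported above
    # for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity {capacity}")
    # VCPU: capacity 192
    # MEMORY_MB: capacity 196078
    # DISK_GB: capacity 400

Under that assumption the host advertises 192 schedulable vCPUs and roughly 196 GB of schedulable RAM, which is the pool the resource-claim and "compute_resources" lock activity in the surrounding entries is drawing from.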
[ 575.941078] env[62109]: DEBUG nova.compute.claims [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 575.941266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.946891] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.955290] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8d5e46b184409ba547c73341e2fbac [ 576.030862] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401442, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.062496] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 576.064308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 0fea9c963e984715ab82e47513618748 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.071931] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.071931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg d69d031e50b843e88c14f54d336bc3cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.076218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d69d031e50b843e88c14f54d336bc3cc [ 576.123200] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fea9c963e984715ab82e47513618748 [ 576.196942] env[62109]: DEBUG nova.scheduler.client.report [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.198948] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 9b426eeb20674a78be4de5181893264c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.214311] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b426eeb20674a78be4de5181893264c [ 576.368337] env[62109]: DEBUG oslo_concurrency.lockutils [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] Releasing lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.368610] env[62109]: DEBUG nova.compute.manager [req-d2550854-138e-4194-93e0-08c0c79b737d req-fb987ace-1f76-498c-bacc-e6512476f360 service nova] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Received event network-vif-deleted-10ebfd79-b01d-46c0-8fcf-e500ebe6ca84 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 576.398893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] 
Expecting reply to msg 05edb0b2d53f4f2ea1cc9027387ede48 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.458508] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05edb0b2d53f4f2ea1cc9027387ede48 [ 576.505792] env[62109]: DEBUG nova.compute.manager [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Received event network-changed-228e8a13-db93-4bfa-b8bc-ab5292e785b1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 576.506033] env[62109]: DEBUG nova.compute.manager [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Refreshing instance network info cache due to event network-changed-228e8a13-db93-4bfa-b8bc-ab5292e785b1. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 576.506228] env[62109]: DEBUG oslo_concurrency.lockutils [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] Acquiring lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.532394] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401442, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.332527} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.534979] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Copied Virtual Disk [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk to [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 576.534979] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleting the datastore file [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5/tmp-sparse.vmdk {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 576.534979] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29aaa061-00a5-43e7-b1cd-ddd32a6140ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.539954] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 576.539954] env[62109]: value = "task-401443" [ 576.539954] env[62109]: _type = "Task" [ 576.539954] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.548740] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401443, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.570894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Releasing lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.573029] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 576.573029] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 576.581011] env[62109]: DEBUG oslo_concurrency.lockutils [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] Acquired lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.581011] env[62109]: DEBUG nova.network.neutron [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Refreshing network info cache for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 576.581011] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] Expecting reply to msg 916402cb9f384e4d9df718a823cfb0b9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.581011] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94c9c3a4-7977-4fc1-b2d4-49e1c8923020 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.590090] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 916402cb9f384e4d9df718a823cfb0b9 [ 576.593211] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ff93a0-9d0c-49d5-bf85-d45173c76365 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.605228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.627829] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7 could not be found. [ 576.627992] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 576.628282] env[62109]: INFO nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Took 0.06 seconds to destroy the instance on the hypervisor. [ 576.629232] env[62109]: DEBUG oslo.service.loopingcall [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.629232] env[62109]: DEBUG nova.compute.manager [-] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.629232] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 576.648388] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.648936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8c94585bbd7f48c981bd910efc663845 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.666892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c94585bbd7f48c981bd910efc663845 [ 576.708311] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.708311] env[62109]: ERROR nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. 
[ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Traceback (most recent call last): [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.driver.spawn(context, instance, image_meta, [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.708311] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] vm_ref = self.build_virtual_machine(instance, [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] for vif in network_info: [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self._sync_wrapper(fn, *args, **kwargs) [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.wait() [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self[:] = self._gt.wait() [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self._exit_event.wait() [ 576.708753] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] result = hub.switch() [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return self.greenlet.switch() [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] result = function(*args, **kwargs) [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] return func(*args, **kwargs) [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise e [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] nwinfo = self.network_api.allocate_for_instance( [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.709101] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] created_port_ids = self._update_ports_for_instance( [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] with excutils.save_and_reraise_exception(): [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] self.force_reraise() [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise self.value [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] updated_port = self._update_port( [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] _ensure_no_port_binding_failure(port) [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 576.709614] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] raise exception.PortBindingFailed(port_id=port['id']) [ 576.709911] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] nova.exception.PortBindingFailed: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. [ 576.709911] env[62109]: ERROR nova.compute.manager [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] [ 576.709911] env[62109]: DEBUG nova.compute.utils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.711920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.880s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.713031] env[62109]: INFO nova.compute.claims [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.714744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 3309b71c4303496ba1b1295cec475570 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.716325] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Build of instance 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66 was re-scheduled: Binding failed for port bcd13e48-db65-4575-a819-060b93efb820, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 576.717456] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 576.717783] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.718101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquired lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.718545] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.719102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg ffe9abe5808a48f88b29315a3d75bbcf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 576.733821] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffe9abe5808a48f88b29315a3d75bbcf [ 576.758450] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquiring lock "ce54ba3d-2cd5-4400-b334-8443ef73bbff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.758685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "ce54ba3d-2cd5-4400-b334-8443ef73bbff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.810378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "5f4a5c62-85f1-47ee-b702-1785bfe62f48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.810613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf 
tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "5f4a5c62-85f1-47ee-b702-1785bfe62f48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.811747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3309b71c4303496ba1b1295cec475570 [ 576.902214] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 576.934931] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:13:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1089944340',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1161040494',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.935250] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.935335] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.935514] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 576.935654] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.935796] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 
tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.936412] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.936668] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.936850] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.937010] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.937194] env[62109]: DEBUG nova.virt.hardware [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.938095] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38be3cc-1a78-4793-b881-1114c299a243 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.949556] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900d4f6e-1645-426e-a802-1d39abbea1fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.049596] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037274} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.049856] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 577.050062] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Moving file from [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659/4800b6ec-9841-4c82-b42e-97cce3beeec5 to [datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5. {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 577.050312] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-fb34778f-a988-4119-b399-e273842d622d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.057104] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 577.057104] env[62109]: value = "task-401444" [ 577.057104] env[62109]: _type = "Task" [ 577.057104] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.065283] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401444, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.154751] env[62109]: DEBUG nova.network.neutron [-] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.155254] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 500825160c144293b672302224a0a040 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.156988] env[62109]: DEBUG nova.network.neutron [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.166905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 500825160c144293b672302224a0a040 [ 577.226035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 96245de2bdda48df933c040381a553d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.236624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96245de2bdda48df933c040381a553d9 [ 577.245401] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.348645] env[62109]: DEBUG nova.network.neutron [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.349224] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] Expecting reply to msg c3c165d60f014d42aeedbf85008bd0a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.358025] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.358563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg f42ec8ba73744265aea43c0c9c32581b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.362813] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3c165d60f014d42aeedbf85008bd0a5 [ 577.367909] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f42ec8ba73744265aea43c0c9c32581b [ 577.443230] env[62109]: ERROR nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. 
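
[editor's note] The nova.virt.hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate the (sockets, cores, threads) combinations that multiply out to the flavor's vCPU count within the flavor/image limits. A hypothetical, simplified illustration of that enumeration follows; it is not the actual nova.virt.hardware implementation.

    # Hypothetical, simplified version of the topology enumeration logged by
    # nova.virt.hardware for the 1-vCPU flavor above.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For a 1-vCPU flavor this yields a single topology, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))
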
[ 577.443230] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.443230] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 577.443230] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 577.443230] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.443230] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.443230] env[62109]: ERROR nova.compute.manager raise self.value [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 577.443230] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 577.443230] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.443230] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 577.443761] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.443761] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 577.443761] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. 
[ 577.443761] env[62109]: ERROR nova.compute.manager [ 577.443761] env[62109]: Traceback (most recent call last): [ 577.443761] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 577.443761] env[62109]: listener.cb(fileno) [ 577.443761] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.443761] env[62109]: result = function(*args, **kwargs) [ 577.443761] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 577.443761] env[62109]: return func(*args, **kwargs) [ 577.443761] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.443761] env[62109]: raise e [ 577.443761] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.443761] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 577.443761] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 577.443761] env[62109]: created_port_ids = self._update_ports_for_instance( [ 577.443761] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 577.443761] env[62109]: with excutils.save_and_reraise_exception(): [ 577.443761] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.443761] env[62109]: self.force_reraise() [ 577.443761] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.443761] env[62109]: raise self.value [ 577.443761] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 577.443761] env[62109]: updated_port = self._update_port( [ 577.443761] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.443761] env[62109]: _ensure_no_port_binding_failure(port) [ 577.443761] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.443761] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 577.444567] env[62109]: nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. [ 577.444567] env[62109]: Removing descriptor: 16 [ 577.444567] env[62109]: ERROR nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. 
[ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Traceback (most recent call last): [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] yield resources [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.driver.spawn(context, instance, image_meta, [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.444567] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] vm_ref = self.build_virtual_machine(instance, [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] for vif in network_info: [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self._sync_wrapper(fn, *args, **kwargs) [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.wait() [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self[:] = self._gt.wait() [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self._exit_event.wait() [ 577.444895] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.445240] env[62109]: ERROR 
nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] result = hub.switch() [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self.greenlet.switch() [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] result = function(*args, **kwargs) [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return func(*args, **kwargs) [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise e [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] nwinfo = self.network_api.allocate_for_instance( [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 577.445240] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] created_port_ids = self._update_ports_for_instance( [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] with excutils.save_and_reraise_exception(): [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.force_reraise() [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise self.value [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] updated_port = self._update_port( [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.445597] 
env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] _ensure_no_port_binding_failure(port) [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.445597] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise exception.PortBindingFailed(port_id=port['id']) [ 577.445918] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. [ 577.445918] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] [ 577.445918] env[62109]: INFO nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Terminating instance [ 577.449877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquiring lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.449877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquired lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.449877] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.449877] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg e8dd67bd151b4f778a157239a9e85283 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.458054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8dd67bd151b4f778a157239a9e85283 [ 577.568511] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401444, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.030595} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.568751] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] File moved {{(pid=62109) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 577.568983] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Cleaning up location [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 577.569092] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleting the datastore file [datastore2] vmware_temp/c6f0b727-8b6d-4818-a46e-0a31a3650659 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 577.569343] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-146bbec6-f3ae-4273-9a86-19d47c19d1ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.579386] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 577.579386] env[62109]: value = "task-401445" [ 577.579386] env[62109]: _type = "Task" [ 577.579386] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.587705] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.659768] env[62109]: INFO nova.compute.manager [-] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Took 1.03 seconds to deallocate network for instance. 
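
[editor's note] The PortBindingFailed tracebacks above show the pattern used when Neutron reports a failed binding: the port's binding status is checked, PortBindingFailed is raised, and the caller re-raises it through excutils.save_and_reraise_exception() after cleanup. A minimal, self-contained sketch of that pattern follows; the exception class and port dict are simplified stand-ins, not Nova's actual code.

    # Simplified stand-in for the port-binding check seen in the tracebacks above.
    # PortBindingFailed here is a local class, not nova.exception.PortBindingFailed.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding failed with vif_type "binding_failed".
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def update_port(port):
        try:
            ensure_no_port_binding_failure(port)
        except Exception:
            # Do any cleanup/logging here; the original exception is re-raised
            # unchanged when the context manager exits.
            with excutils.save_and_reraise_exception():
                print('port update failed for %s' % port['id'])

    try:
        update_port({'id': 'ebefe3f7-1348-4203-ab33-a289a4fa30de',
                     'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)
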
[ 577.662351] env[62109]: DEBUG nova.compute.claims [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 577.662728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.855755] env[62109]: DEBUG oslo_concurrency.lockutils [req-57c6d235-9b46-497a-b475-9ef0ca19c230 req-3a2281ba-d38f-4898-8435-807a4b04bfad service nova] Releasing lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.862463] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Releasing lock "refresh_cache-9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.863092] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 577.863386] env[62109]: DEBUG nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.863658] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 577.937195] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.938036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg c2babec38fa1433bbe248284a86a062a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 577.946999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2babec38fa1433bbe248284a86a062a [ 577.978498] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.002210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f88aa8b-bd88-4b22-ac37-8b45a832d99f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.012852] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12352336-566d-4825-b77c-66f38cd37a3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.063171] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a381d96-5cae-42fe-9501-7478d2889c9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.078551] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95be4bff-8a44-4c5e-824c-7c8ff2044ca4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.098346] env[62109]: DEBUG nova.compute.provider_tree [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.098847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg c6d04104da0f40efbf966600823c8ac0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.103374] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058368} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.103603] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 578.104318] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3c3ab04-5ec8-4fa1-ae9e-f14cf3c80205 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.115472] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d04104da0f40efbf966600823c8ac0 [ 578.119562] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 578.119562] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]524f5bbf-bb32-57da-4085-38798ecb0a8d" [ 578.119562] env[62109]: _type = "Task" [ 578.119562] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.129833] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]524f5bbf-bb32-57da-4085-38798ecb0a8d, 'name': SearchDatastore_Task, 'duration_secs': 0.01486} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.130089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.130306] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore2] 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55/9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 578.130592] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a31e12f-d6bd-4ded-a5d8-f64284c49634 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.135474] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.136416] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 35527e8a0ddf470bacf2c6b0071d3a8c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.140650] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 578.140650] env[62109]: value = "task-401446" [ 578.140650] env[62109]: _type = "Task" [ 578.140650] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.149539] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "56f9bb28-2770-46aa-9d95-f60cdeae0967" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.149771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "56f9bb28-2770-46aa-9d95-f60cdeae0967" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.154863] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401446, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.169584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35527e8a0ddf470bacf2c6b0071d3a8c [ 578.203784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 909e9f893b9c4531b1980e08312787c8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.214360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 909e9f893b9c4531b1980e08312787c8 [ 578.441111] env[62109]: DEBUG nova.network.neutron [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.441651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg e558b2ad7c504739806df8f7b0d396d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.450386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e558b2ad7c504739806df8f7b0d396d6 [ 578.601586] env[62109]: DEBUG nova.scheduler.client.report [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.604205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg acb34adc446841c3a88aa25021a3e603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.618935] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acb34adc446841c3a88aa25021a3e603 [ 578.639673] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Releasing lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.640152] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 578.640343] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 578.640640] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a835f482-eb57-47d4-ac09-4a04a9e6f08c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.651957] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401446, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.658260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05b1908-9d78-45d3-b50b-1b7403aeefc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.683673] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a473614-2051-47ab-a9bc-f87385a264cd could not be found. [ 578.684050] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 578.684122] env[62109]: INFO nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 578.684476] env[62109]: DEBUG oslo.service.loopingcall [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.685766] env[62109]: DEBUG nova.compute.manager [-] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.685839] env[62109]: DEBUG nova.network.neutron [-] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 578.688890] env[62109]: DEBUG nova.compute.manager [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Received event network-changed-38c57439-2b4f-48ef-884c-f248749447a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 578.689095] env[62109]: DEBUG nova.compute.manager [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Refreshing instance network info cache due to event network-changed-38c57439-2b4f-48ef-884c-f248749447a4. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 578.689312] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Acquiring lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.689448] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Acquired lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.689598] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Refreshing network info cache for port 38c57439-2b4f-48ef-884c-f248749447a4 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 578.690186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Expecting reply to msg d175f5bb39d0482a89f2ebab4901422a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.699199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d175f5bb39d0482a89f2ebab4901422a [ 578.711882] env[62109]: DEBUG nova.compute.manager [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 578.713636] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b76f75-62e2-48b2-b7bf-fcdce1d391b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.725917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 
8cb8453c2aaf40a8b37801bc0e0db91d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.762664] env[62109]: DEBUG nova.network.neutron [-] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.763499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e30dfc2d4c3a45379e03140707eb0fb8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.780550] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e30dfc2d4c3a45379e03140707eb0fb8 [ 578.809385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cb8453c2aaf40a8b37801bc0e0db91d [ 578.945738] env[62109]: INFO nova.compute.manager [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66] Took 1.08 seconds to deallocate network for instance. [ 578.947592] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg d22ee34f3ad146ab8c01eecb9c7b0b7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 578.992963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d22ee34f3ad146ab8c01eecb9c7b0b7d [ 579.115921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.115921] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 579.115921] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 6a065fef69d14163983b98a0d947f845 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.115921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.125s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.115921] env[62109]: INFO nova.compute.claims [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.117654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 5aae5a0b50cd499aaf75291925c23e69 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.149351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a065fef69d14163983b98a0d947f845 [ 579.154025] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401446, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.165738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aae5a0b50cd499aaf75291925c23e69 [ 579.211688] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 579.229053] env[62109]: INFO nova.compute.manager [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] instance snapshotting [ 579.229751] env[62109]: DEBUG nova.objects.instance [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lazy-loading 'flavor' on Instance uuid 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 579.231189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 0da662fb4bd3483a85802fbdb7ba22fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.266881] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0da662fb4bd3483a85802fbdb7ba22fd [ 579.271437] env[62109]: DEBUG nova.network.neutron [-] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.274212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c4563115f17c4f84868583ade33f2a54 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.282926] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4563115f17c4f84868583ade33f2a54 [ 579.343294] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.344223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Expecting reply to msg eecde9ce710c4b518de4b64072f33f7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.356259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eecde9ce710c4b518de4b64072f33f7f [ 579.409961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 762c8bc345e44fd78ed451f6c061f5cb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.420024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 762c8bc345e44fd78ed451f6c061f5cb [ 579.451908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 9f05abd7dd8e4a38a1892081a15bd750 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.454978] env[62109]: DEBUG nova.compute.manager [req-9eb2b7d0-37ce-43f1-bc75-a2554e882b53 req-c3fdd478-1f90-4f44-95b4-d22f98d3a5c7 service nova] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Received event network-vif-deleted-228e8a13-db93-4bfa-b8bc-ab5292e785b1 {{(pid=62109) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11129}} [ 579.497602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f05abd7dd8e4a38a1892081a15bd750 [ 579.620675] env[62109]: DEBUG nova.compute.utils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.621668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 54c6c511d92d444b8934b6b434ae22c3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.624427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 68f96c7f7fe240f88e1844cd17df2869 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.625858] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 579.626051] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.631300] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68f96c7f7fe240f88e1844cd17df2869 [ 579.637580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54c6c511d92d444b8934b6b434ae22c3 [ 579.654155] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401446, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.728663] env[62109]: DEBUG nova.policy [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50add8117fb046ff9d5efdc0f40fcbfb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cacd2ccfb22c4f73abbe2d5670d1fbb9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 579.736588] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d509eaf-649d-4d31-9868-1b99efed8f33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.753246] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d67eb5-9d10-4b85-8c85-9fdd84326955 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.761182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 4d91efcbb2b44e86982c24fcb100567a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.775243] env[62109]: INFO nova.compute.manager [-] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Took 1.09 seconds to deallocate network for instance. 
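
The log around this point shows a failed build being unwound: the network is deallocated ("Took 1.09 seconds to deallocate network for instance") and, in the entries that follow, the compute manager aborts the resource claim while holding the "compute_resources" lock so the tracker's free capacity is restored. The sketch below illustrates that claim/abort bookkeeping with a plain threading.Lock and invented class and field names standing in for Nova's ResourceTracker; the totals (48 VCPU, 196590 MB) and the m1.nano sizes are taken from the inventory and flavor entries elsewhere in this log.

    import threading

    class ToyResourceTracker:
        """Illustrative stand-in for nova's ResourceTracker: claim and abort under one lock."""
        def __init__(self, vcpus, memory_mb):
            self._lock = threading.Lock()      # plays the role of the "compute_resources" lock
            self.free = {'vcpus': vcpus, 'memory_mb': memory_mb}

        def instance_claim(self, flavor):
            with self._lock:
                if (flavor['vcpus'] > self.free['vcpus']
                        or flavor['memory_mb'] > self.free['memory_mb']):
                    raise RuntimeError('insufficient resources on this node')
                self.free['vcpus'] -= flavor['vcpus']
                self.free['memory_mb'] -= flavor['memory_mb']
                return dict(flavor)            # the claim that may later be aborted

        def abort_instance_claim(self, claim):
            # Analogue of the "Aborting claim" entries: a failed build returns its
            # claimed resources under the same lock that granted them.
            with self._lock:
                self.free['vcpus'] += claim['vcpus']
                self.free['memory_mb'] += claim['memory_mb']

    tracker = ToyResourceTracker(vcpus=48, memory_mb=196590)
    claim = tracker.instance_claim({'vcpus': 1, 'memory_mb': 192})   # m1.nano-sized claim
    tracker.abort_instance_claim(claim)                              # build failed, roll back
    assert tracker.free == {'vcpus': 48, 'memory_mb': 196590}
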
[ 579.777345] env[62109]: DEBUG nova.compute.claims [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 579.777515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.796605] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d91efcbb2b44e86982c24fcb100567a [ 579.850043] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Releasing lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.850355] env[62109]: DEBUG nova.compute.manager [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Received event network-vif-deleted-38c57439-2b4f-48ef-884c-f248749447a4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 579.850590] env[62109]: DEBUG nova.compute.manager [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Received event network-changed-ebefe3f7-1348-4203-ab33-a289a4fa30de {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 579.850784] env[62109]: DEBUG nova.compute.manager [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Refreshing instance network info cache due to event network-changed-ebefe3f7-1348-4203-ab33-a289a4fa30de. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 579.851024] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Acquiring lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.851198] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Acquired lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.851391] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Refreshing network info cache for port ebefe3f7-1348-4203-ab33-a289a4fa30de {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 579.851878] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Expecting reply to msg 75141c2b64a5479e9f218f54af6ec814 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.858982] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75141c2b64a5479e9f218f54af6ec814 [ 579.912867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.913219] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.913701] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.913701] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.914015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock 
"7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.918121] env[62109]: INFO nova.compute.manager [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Terminating instance [ 579.919999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "refresh_cache-7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.920110] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquired lock "refresh_cache-7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.920331] env[62109]: DEBUG nova.network.neutron [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 579.920760] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 018e2c65dab742f992c0077ce02f61fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 579.928154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 018e2c65dab742f992c0077ce02f61fb [ 580.004483] env[62109]: INFO nova.scheduler.client.report [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Deleted allocations for instance 9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66 [ 580.007469] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 382b4e75dc1e4293b7a3a6aafb3a858b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.019888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 382b4e75dc1e4293b7a3a6aafb3a858b [ 580.131806] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 580.131806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 54632ed9a04e42148672e095d0946c15 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.155224] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401446, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.779952} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.155472] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore2] 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55/9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 580.155677] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 580.155911] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6106b51a-4fd4-4a1b-b5c6-6c07ab906d46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.161843] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 580.161843] env[62109]: value = "task-401447" [ 580.161843] env[62109]: _type = "Task" [ 580.161843] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.173398] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401447, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.196908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54632ed9a04e42148672e095d0946c15 [ 580.267299] env[62109]: DEBUG nova.compute.manager [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance disappeared during snapshot {{(pid=62109) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 580.386692] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.416827] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cb54bf-e565-4d73-b1a2-a8739d5e5ab1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.424826] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c2abae-b78e-4a82-90d8-205df48eb537 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.464200] env[62109]: DEBUG nova.compute.manager [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Found 0 images (rotation: 2) {{(pid=62109) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 580.464596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c568f383-5f47-462e-aa81-f32d554061e2 tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 3410e7ccdc1a44cba27bd8da9c66dd75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.466342] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f8902f-42fd-47e4-9476-420afea4598a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.475277] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7236ba1-9855-4a0f-bedf-562e1a9bf572 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.479994] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3410e7ccdc1a44cba27bd8da9c66dd75 [ 580.492475] env[62109]: DEBUG nova.compute.provider_tree [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.493075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 050a7e6329314b16ae839dc5aefdc343 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.508020] env[62109]: DEBUG nova.network.neutron [None 
req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.510830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 050a7e6329314b16ae839dc5aefdc343 [ 580.515800] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4797d1cc-2262-489e-896f-329d69801242 tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "9cbf81d9-92b5-4edf-8e7e-f4caa8f44f66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.421s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.516310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 28ce2de85ba74b1fb61deb931b873626 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.529394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28ce2de85ba74b1fb61deb931b873626 [ 580.636831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg bffde809cc974dbdb2d516b060132c14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.644806] env[62109]: DEBUG nova.network.neutron [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.645282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 29c8860a26504cae9bbe43890d8e1e0a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.659189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29c8860a26504cae9bbe43890d8e1e0a [ 580.671317] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401447, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091868} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.671602] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 580.673642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14b14f9-f926-4615-b270-e0eb5e944d50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.703806] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55/9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.704645] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bffde809cc974dbdb2d516b060132c14 [ 580.704928] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef54a3ef-cc56-497a-84f6-5b46612f9265 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.741465] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 580.741465] env[62109]: value = "task-401448" [ 580.741465] env[62109]: _type = "Task" [ 580.741465] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.757063] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401448, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.907250] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquiring lock "35411b03-ace3-40da-8c3e-3872ac003bd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.907490] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "35411b03-ace3-40da-8c3e-3872ac003bd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.953322] env[62109]: DEBUG nova.network.neutron [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.953781] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Expecting reply to msg e2093ed1970c46419d24a49e8f627b51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 580.971322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2093ed1970c46419d24a49e8f627b51 [ 580.995407] env[62109]: DEBUG nova.scheduler.client.report [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 580.998228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 05f0fc7cbd994d4cbb1caa174fed3d4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 581.014017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f0fc7cbd994d4cbb1caa174fed3d4b [ 581.018938] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 581.021303] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 9986d507ec794d57a0a392087df5f9a0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 581.116473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9986d507ec794d57a0a392087df5f9a0 [ 581.140901] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 581.148026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Releasing lock "refresh_cache-7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.148026] env[62109]: DEBUG nova.compute.manager [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 581.148201] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 581.149129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c641b867-c1dc-451c-be35-c972e066da46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.157217] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 581.157501] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4c71f75-ea4e-4706-bc05-bde510651732 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.169163] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.169408] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.169516] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.169690] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.169837] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.170222] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.170222] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.170474] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.171941] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.172136] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 581.172316] env[62109]: DEBUG nova.virt.hardware [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.173050] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 581.173050] env[62109]: value = "task-401449" [ 581.173050] env[62109]: _type = "Task" [ 581.173050] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.173853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfb26b5-6af0-43cd-bca7-038329a8a470 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.177223] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Successfully created port: 636f6a96-849f-411e-a602-2d048aa9867a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.190980] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348e28ad-aa55-4c32-94ba-a645144ab3e3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.195337] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.250885] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401448, 'name': ReconfigVM_Task, 'duration_secs': 0.275323} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.251233] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55/9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 581.252237] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-148e5b72-835c-4742-928c-19ebcd34fa03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.258542] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 581.258542] env[62109]: value = "task-401450" [ 581.258542] env[62109]: _type = "Task" [ 581.258542] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.273677] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401450, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.456725] env[62109]: DEBUG oslo_concurrency.lockutils [req-33c87e5f-0880-4165-8d00-801271fc40d9 req-42ac2bcd-4758-4df6-b12e-cc7a88f03aea service nova] Releasing lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.500955] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.501487] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 581.503250] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 7a9909066a0f4273bd95212439210bf3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 581.504449] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.049s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.505657] env[62109]: INFO nova.compute.claims [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.507543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg be6010b11e7c4a38a6873c199f1eda85 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 581.551252] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.560866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a9909066a0f4273bd95212439210bf3 [ 581.566523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be6010b11e7c4a38a6873c199f1eda85 [ 581.691333] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401449, 'name': PowerOffVM_Task, 'duration_secs': 0.143257} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.691333] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 581.691333] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 581.691333] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7524b68e-5365-4739-90fd-2d4083408295 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.716129] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 581.716129] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 581.716129] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleting the datastore file [datastore1] 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 581.716129] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a697ceaf-10f1-44d0-b3c4-9191fa9ab816 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.722781] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for the task: (returnval){ [ 581.722781] env[62109]: value = "task-401452" [ 581.722781] env[62109]: _type = "Task" [ 581.722781] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.732683] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.768415] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401450, 'name': Rename_Task, 'duration_secs': 0.132446} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.768874] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 581.769241] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cccbf9b-803c-4888-8521-b67ed4799d7d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.775190] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 581.775190] env[62109]: value = "task-401453" [ 581.775190] env[62109]: _type = "Task" [ 581.775190] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.784259] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401453, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.015647] env[62109]: DEBUG nova.compute.utils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.015647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 1167ceb357a54a9c94ad9287e9bc8081 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.015647] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 582.015647] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 582.019417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 23b9746df87341d282c3f717ca3dbf9c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.027121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1167ceb357a54a9c94ad9287e9bc8081 [ 582.033430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23b9746df87341d282c3f717ca3dbf9c [ 582.131909] env[62109]: DEBUG nova.compute.manager [req-64d2f954-d503-4aae-b177-83df5a0868a4 req-d3d493be-e89c-41b4-84bd-0434e33177ca service nova] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Received event network-vif-deleted-ebefe3f7-1348-4203-ab33-a289a4fa30de {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 582.137563] env[62109]: DEBUG nova.policy [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4ea22bb21004f69a2b27d306493db45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '423f777bec3c474a91970fce3e308097', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 582.232024] env[62109]: DEBUG oslo_vmware.api [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Task: {'id': task-401452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103882} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.232217] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 582.232433] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 582.232555] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 582.232721] env[62109]: INFO nova.compute.manager [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Took 1.08 seconds to destroy the instance on the hypervisor. [ 582.232951] env[62109]: DEBUG oslo.service.loopingcall [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.233129] env[62109]: DEBUG nova.compute.manager [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 582.233217] env[62109]: DEBUG nova.network.neutron [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 582.259352] env[62109]: DEBUG nova.network.neutron [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.259942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e70caf9d551b460b8a12d3bb0ca99a68 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.282078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e70caf9d551b460b8a12d3bb0ca99a68 [ 582.285637] env[62109]: DEBUG oslo_vmware.api [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401453, 'name': PowerOnVM_Task, 'duration_secs': 0.410921} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.285899] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 582.286103] env[62109]: INFO nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Took 10.06 seconds to spawn the instance on the hypervisor. [ 582.286297] env[62109]: DEBUG nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 582.287024] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146c9eda-e8f6-4928-b35d-2167c7af2b62 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.299579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 11f96c662c784f1e814ca1c901f3b000 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.348533] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11f96c662c784f1e814ca1c901f3b000 [ 582.519014] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 582.519014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 0ec8144685064e89b5bdcf6ed301ff13 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.570082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ec8144685064e89b5bdcf6ed301ff13 [ 582.765860] env[62109]: DEBUG nova.network.neutron [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.765860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cec1c32dfc21427c9dacb1fd77e12bf1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.774666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cec1c32dfc21427c9dacb1fd77e12bf1 [ 582.819809] env[62109]: INFO nova.compute.manager [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Took 22.19 seconds to build instance. 
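The ReconfigVM_Task / Rename_Task / PowerOnVM_Task records above all follow the same poll-until-done pattern ("Waiting for the task ... to complete", "progress is N%", "completed successfully"). A minimal, self-contained sketch of that pattern is shown below; it is an illustration only, not the oslo.vmware implementation, and the names poll_task_info, TaskFailed and the 'state'/'progress' keys are assumptions made for the example.

# Hedged sketch of a generic task-polling loop, in the spirit of the
# oslo_vmware.api wait_for_task() calls logged above. poll_task_info()
# and TaskFailed are illustrative stand-ins, not the real oslo.vmware API.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(poll_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or the timeout expires.

    poll_task_info is any callable returning a dict such as
    {'state': 'running', 'progress': 42}, analogous to the
    "progress is N%" DEBUG records in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        time.sleep(interval)
    raise TimeoutError('task did not complete within %ss' % timeout)


if __name__ == '__main__':
    # Toy usage: a fake task that completes after three polls.
    states = iter([{'state': 'running', 'progress': 6},
                   {'state': 'running', 'progress': 66},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(states), interval=0.01))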
[ 582.820375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg d745c9d5ccc34c70a33773bcdaed02b9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.827774] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9705590a-6c18-4375-8cf1-9de41e706dcc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.836442] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7bab32-1e72-41ae-98c1-2586a0c1bedd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.840381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d745c9d5ccc34c70a33773bcdaed02b9 [ 582.873149] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c0935f-2f8b-4889-a0e6-2878a0fda074 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.883252] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4b1431-19cd-4826-8ebc-a6da1ce26cf1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.899166] env[62109]: DEBUG nova.compute.provider_tree [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.899722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg e5163b03799e4a9a8dbdc504657d5349 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 582.906244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5163b03799e4a9a8dbdc504657d5349 [ 582.969806] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Successfully created port: 4bf704f8-170e-48fa-9d73-18fa03b4afe7 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.023321] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 8809092bc4a5416d87b09742dfd5347a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.106928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8809092bc4a5416d87b09742dfd5347a [ 583.267799] env[62109]: INFO nova.compute.manager [-] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Took 1.03 seconds to deallocate network for instance. 
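The "Inventory has not changed for provider 5d099501-..." records above carry the same resource data each time (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=400). The schedulable capacity behind those figures is generally derived as (total - reserved) * allocation_ratio; the helper below is a standalone illustration of that arithmetic under that assumption, not Nova or placement source code.

# Hedged sketch: effective capacity implied by the inventory records above.
# The formula (total - reserved) * allocation_ratio is assumed; the helper
# and constant names are illustrative only.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def effective_capacity(inventory):
    """Return schedulable capacity per resource class."""
    return {
        rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        for rc, inv in inventory.items()
    }


if __name__ == '__main__':
    # With the figures logged above this prints
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}.
    print(effective_capacity(INVENTORY))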
[ 583.272222] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg e5e25dcf551f486aa2c88dcb3895c5c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.312048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5e25dcf551f486aa2c88dcb3895c5c0 [ 583.323378] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1bf2d3ef-46ea-4fa4-8ca2-07de0503504d tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.698s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.323945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 5cd335ba8a2e4af391e002e83e33cf99 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.334850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cd335ba8a2e4af391e002e83e33cf99 [ 583.402456] env[62109]: DEBUG nova.scheduler.client.report [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.404850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg ac8f262804e94e6292f47592e323cc7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.417483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac8f262804e94e6292f47592e323cc7f [ 583.527229] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 583.554453] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.554708] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.554874] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.555079] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.555235] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.555398] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.555605] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.555757] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.555955] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.556170] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.556356] env[62109]: DEBUG nova.virt.hardware [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.557217] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06801fd1-3045-4322-8d83-058938a7065d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.565813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4434df-26b7-485f-a688-39636de6d52c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.775947] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.826781] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 583.828601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg c615f47fd9074c74a8991f3779e82a46 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.869720] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c615f47fd9074c74a8991f3779e82a46 [ 583.907207] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.907702] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 583.909430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg fa030fab82964548bd0be141422d0d66 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.910385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.170s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.912060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 378b0732b9ce4e529443717d58e099ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 583.951123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 378b0732b9ce4e529443717d58e099ad [ 583.959827] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa030fab82964548bd0be141422d0d66 [ 584.352621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.418263] env[62109]: DEBUG nova.compute.utils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.419141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg bf75e24cc4a341a9bc5468c8f3b97dad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 584.420484] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 584.420738] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 584.429714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf75e24cc4a341a9bc5468c8f3b97dad [ 584.515699] env[62109]: DEBUG nova.policy [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7677135908304a35935a9de31e0e5fd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88391b50bec241769f291a70ba99dfb9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 584.678029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08e56b2-ea02-4f0b-89cd-74a95216b5ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.687461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e3f5b5-3f6d-482a-a12c-db01ce0a4ef5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.729407] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f211706f-fc9b-48f8-afb1-d3761677afe1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.737254] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d280cbcf-7885-4f9c-9ba7-8a2cf77c4a4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.756203] env[62109]: DEBUG nova.compute.provider_tree [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.757110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7cf42be57bf940ffab1c9771edb2c037 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 584.764437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cf42be57bf940ffab1c9771edb2c037 [ 584.924019] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Start building block device 
mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 584.926123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 990635d1d4814fe181b4637f57058da1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 584.972613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 990635d1d4814fe181b4637f57058da1 [ 585.062699] env[62109]: DEBUG nova.compute.manager [None req-a0ef4d34-f687-4d5c-91aa-cfdde34151c5 tempest-ServerDiagnosticsV248Test-86897178 tempest-ServerDiagnosticsV248Test-86897178-project-admin] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 585.063813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7ba62d-0660-4653-a837-272c4ef56f95 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.070919] env[62109]: INFO nova.compute.manager [None req-a0ef4d34-f687-4d5c-91aa-cfdde34151c5 tempest-ServerDiagnosticsV248Test-86897178 tempest-ServerDiagnosticsV248Test-86897178-project-admin] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Retrieving diagnostics [ 585.071874] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56da143f-4b93-4584-b4e8-d8b3fa14c781 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.254859] env[62109]: ERROR nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. 
[ 585.254859] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.254859] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.254859] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.254859] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.254859] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.254859] env[62109]: ERROR nova.compute.manager raise self.value [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.254859] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 585.254859] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.254859] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 585.255380] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.255380] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 585.255380] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. 
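The traceback above shows the error-handling shape used in nova/network/neutron.py: port updates run inside oslo_utils.excutils.save_and_reraise_exception() so cleanup can happen before the original exception propagates, and a port whose binding failed is surfaced as PortBindingFailed. The sketch below reproduces only that shape; the PortBindingFailed class, _ensure_no_port_binding_failure() and _cleanup_created_ports() here are simplified stand-ins for the Nova internals, not the real code.

# Hedged sketch of the save_and_reraise_exception() pattern seen in the
# traceback above. Only excutils.save_and_reraise_exception() is the real
# library call; the other names are illustrative stand-ins.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs '
            'for more information.' % port_id)


def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


def _cleanup_created_ports(created):
    # Placeholder for the real rollback (e.g. deleting ports created so far).
    pass


def update_ports(ports):
    created = []
    for port in ports:
        try:
            _ensure_no_port_binding_failure(port)
            created.append(port['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs before the original exception is re-raised.
                _cleanup_created_ports(created)
    return created

Calling update_ports() with a port whose binding:vif_type is 'binding_failed' performs the cleanup and then re-raises, producing the same "Binding failed for port ..." message recorded in the error above.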
[ 585.255380] env[62109]: ERROR nova.compute.manager [ 585.255380] env[62109]: Traceback (most recent call last): [ 585.255380] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 585.255380] env[62109]: listener.cb(fileno) [ 585.255380] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.255380] env[62109]: result = function(*args, **kwargs) [ 585.255380] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.255380] env[62109]: return func(*args, **kwargs) [ 585.255380] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.255380] env[62109]: raise e [ 585.255380] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.255380] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 585.255380] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.255380] env[62109]: created_port_ids = self._update_ports_for_instance( [ 585.255380] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.255380] env[62109]: with excutils.save_and_reraise_exception(): [ 585.255380] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.255380] env[62109]: self.force_reraise() [ 585.255380] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.255380] env[62109]: raise self.value [ 585.255380] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.255380] env[62109]: updated_port = self._update_port( [ 585.255380] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.255380] env[62109]: _ensure_no_port_binding_failure(port) [ 585.255380] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.255380] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 585.256389] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. [ 585.256389] env[62109]: Removing descriptor: 16 [ 585.256389] env[62109]: ERROR nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. 
[ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Traceback (most recent call last): [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] yield resources [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.driver.spawn(context, instance, image_meta, [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.256389] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] vm_ref = self.build_virtual_machine(instance, [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] for vif in network_info: [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self._sync_wrapper(fn, *args, **kwargs) [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.wait() [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self[:] = self._gt.wait() [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self._exit_event.wait() [ 585.256759] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.257150] env[62109]: ERROR 
nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] result = hub.switch() [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self.greenlet.switch() [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] result = function(*args, **kwargs) [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return func(*args, **kwargs) [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise e [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] nwinfo = self.network_api.allocate_for_instance( [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.257150] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] created_port_ids = self._update_ports_for_instance( [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] with excutils.save_and_reraise_exception(): [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.force_reraise() [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise self.value [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] updated_port = self._update_port( [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.257551] 
env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] _ensure_no_port_binding_failure(port) [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.257551] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise exception.PortBindingFailed(port_id=port['id']) [ 585.258022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. [ 585.258022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] [ 585.258022] env[62109]: INFO nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Terminating instance [ 585.258306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquiring lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.258350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquired lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.258481] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.259016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 15158975db914135815e02aab89b2673 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 585.260542] env[62109]: DEBUG nova.scheduler.client.report [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 585.263416] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] 
Expecting reply to msg 62ecfa1404a14bcca03886d48f27fec4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 585.271819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15158975db914135815e02aab89b2673 [ 585.274581] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ecfa1404a14bcca03886d48f27fec4 [ 585.343660] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Successfully created port: 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 585.431140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 22af86337b494bf093db2b73496270a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 585.480348] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22af86337b494bf093db2b73496270a4 [ 585.767694] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.768077] env[62109]: ERROR nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. 
[ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Traceback (most recent call last): [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.driver.spawn(context, instance, image_meta, [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] vm_ref = self.build_virtual_machine(instance, [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.768077] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] for vif in network_info: [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self._sync_wrapper(fn, *args, **kwargs) [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.wait() [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self[:] = self._gt.wait() [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self._exit_event.wait() [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] result = hub.switch() [ 585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
585.768404] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return self.greenlet.switch() [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] result = function(*args, **kwargs) [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] return func(*args, **kwargs) [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise e [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] nwinfo = self.network_api.allocate_for_instance( [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] created_port_ids = self._update_ports_for_instance( [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] with excutils.save_and_reraise_exception(): [ 585.768801] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] self.force_reraise() [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise self.value [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] updated_port = self._update_port( [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] _ensure_no_port_binding_failure(port) [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] raise exception.PortBindingFailed(port_id=port['id']) [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] nova.exception.PortBindingFailed: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. [ 585.769172] env[62109]: ERROR nova.compute.manager [instance: 22179940-4e5b-4879-be19-a9addb0a628c] [ 585.769470] env[62109]: DEBUG nova.compute.utils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 585.770419] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.829s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.771901] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg b74c406d047a449aa6dc93cfcfe3e9d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 585.773087] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Build of instance 22179940-4e5b-4879-be19-a9addb0a628c was re-scheduled: Binding failed for port 10ebfd79-b01d-46c0-8fcf-e500ebe6ca84, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 585.773525] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 585.773737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.773877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.774027] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.774410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 9532d9e82da94155bb77760b3814987a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 585.782738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9532d9e82da94155bb77760b3814987a [ 585.806555] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.816567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b74c406d047a449aa6dc93cfcfe3e9d0 [ 585.934205] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 585.960732] env[62109]: DEBUG nova.compute.manager [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Received event network-changed-636f6a96-849f-411e-a602-2d048aa9867a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 585.960975] env[62109]: DEBUG nova.compute.manager [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Refreshing instance network info cache due to event network-changed-636f6a96-849f-411e-a602-2d048aa9867a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 585.961194] env[62109]: DEBUG oslo_concurrency.lockutils [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] Acquiring lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.972498] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 585.972969] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 585.972969] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.973101] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 585.973235] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.973390] env[62109]: 
DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 585.973613] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 585.973768] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 585.973931] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 585.974088] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 585.974301] env[62109]: DEBUG nova.virt.hardware [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.975498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddad8385-86d2-46d8-adde-520d59f7d560 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.985269] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0466b38-db82-454e-918f-6b6a1e2eb973 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.148386] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.148386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 7f3dc399718a49aaacd13d1cde5f019c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 586.160275] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 7f3dc399718a49aaacd13d1cde5f019c [ 586.357233] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.589558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34012a5e-31c7-4cbd-94f4-aec73339731a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.599645] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00190675-1449-4b3b-b978-801b62c4e314 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.646440] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a86bd1-ddc4-4aa3-881e-4bb8e849f9fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.650063] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Releasing lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.650471] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.650661] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 586.650947] env[62109]: DEBUG oslo_concurrency.lockutils [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] Acquired lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.651113] env[62109]: DEBUG nova.network.neutron [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Refreshing network info cache for port 636f6a96-849f-411e-a602-2d048aa9867a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 586.651545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] Expecting reply to msg 63227b26edf749cf85fef9b58ddbe89e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 586.652347] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74601780-85b1-4398-a3d5-074343e3691a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.659851] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5db0ee-b097-4e62-992e-b09632643718 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.663872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63227b26edf749cf85fef9b58ddbe89e [ 586.666743] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0253d8-769b-4fda-97db-ad91eed377e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.689629] env[62109]: DEBUG nova.compute.provider_tree [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.690214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 57aedc1eed454550a3bc9c09a546c743 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 586.695225] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd could not be found. 
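The repeated failures in the tracebacks above all bottom out in /opt/stack/nova/nova/network/neutron.py line 294, where _ensure_no_port_binding_failure(port) raises nova.exception.PortBindingFailed for the port Neutron just returned. The snippet below is a minimal, self-contained sketch of that check for illustration only, not the actual Nova code: the 'binding:vif_type' key and the 'binding_failed' value are assumptions about how Neutron marks a port whose binding failed; only the exception message and the port id are taken from the log records above.

# Sketch of the failure path seen in the tracebacks above (illustrative, not
# the real nova/network/neutron.py implementation). The 'binding:vif_type'
# check and the 'binding_failed' value are assumptions; the exception message
# and port id mirror what the log shows.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron sets on a failed binding


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed.

    In the log above, this is the step that converts a failed binding on the
    updated port into the PortBindingFailed that aborts the spawn.
    """
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # A port dict shaped like the one Neutron would return for the failed
    # binding of port 636f6a96-849f-411e-a602-2d048aa9867a from the log.
    failed_port = {'id': '636f6a96-849f-411e-a602-2d048aa9867a',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)

When the binding fails (typically because no mechanism driver or live agent can bind the port on the target host), this exception propagates up through _update_ports_for_instance and _allocate_network_async and aborts the spawn, which is why the records that follow show the instance being terminated and the build being re-scheduled.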
[ 586.695434] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 586.695607] env[62109]: INFO nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 586.695879] env[62109]: DEBUG oslo.service.loopingcall [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.696319] env[62109]: DEBUG nova.compute.manager [-] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.697009] env[62109]: DEBUG nova.network.neutron [-] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 586.702016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57aedc1eed454550a3bc9c09a546c743 [ 586.761427] env[62109]: DEBUG nova.network.neutron [-] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.761791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 09bd59260e7f4141aebae70f56cdf1f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 586.775366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09bd59260e7f4141aebae70f56cdf1f9 [ 586.850315] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.850866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 26a5c670d56045e6949f5c429f86cb1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 586.860321] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26a5c670d56045e6949f5c429f86cb1b [ 587.176705] env[62109]: ERROR nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. 
[ 587.176705] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.176705] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.176705] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.176705] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.176705] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.176705] env[62109]: ERROR nova.compute.manager raise self.value [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.176705] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 587.176705] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.176705] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 587.177245] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.177245] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 587.177245] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. 
[ 587.177245] env[62109]: ERROR nova.compute.manager [ 587.177245] env[62109]: Traceback (most recent call last): [ 587.177245] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 587.177245] env[62109]: listener.cb(fileno) [ 587.177245] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.177245] env[62109]: result = function(*args, **kwargs) [ 587.177245] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.177245] env[62109]: return func(*args, **kwargs) [ 587.177245] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.177245] env[62109]: raise e [ 587.177245] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.177245] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 587.177245] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.177245] env[62109]: created_port_ids = self._update_ports_for_instance( [ 587.177245] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.177245] env[62109]: with excutils.save_and_reraise_exception(): [ 587.177245] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.177245] env[62109]: self.force_reraise() [ 587.177245] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.177245] env[62109]: raise self.value [ 587.177245] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.177245] env[62109]: updated_port = self._update_port( [ 587.177245] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.177245] env[62109]: _ensure_no_port_binding_failure(port) [ 587.177245] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.177245] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 587.178164] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. [ 587.178164] env[62109]: Removing descriptor: 14 [ 587.178164] env[62109]: ERROR nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. 
[ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Traceback (most recent call last): [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] yield resources [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.driver.spawn(context, instance, image_meta, [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.178164] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] vm_ref = self.build_virtual_machine(instance, [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] for vif in network_info: [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self._sync_wrapper(fn, *args, **kwargs) [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.wait() [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self[:] = self._gt.wait() [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self._exit_event.wait() [ 587.178570] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.178998] env[62109]: ERROR 
nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] result = hub.switch() [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self.greenlet.switch() [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] result = function(*args, **kwargs) [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return func(*args, **kwargs) [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise e [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] nwinfo = self.network_api.allocate_for_instance( [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.178998] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] created_port_ids = self._update_ports_for_instance( [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] with excutils.save_and_reraise_exception(): [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.force_reraise() [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise self.value [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] updated_port = self._update_port( [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.179404] 
env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] _ensure_no_port_binding_failure(port) [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.179404] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise exception.PortBindingFailed(port_id=port['id']) [ 587.179738] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. [ 587.179738] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] [ 587.179738] env[62109]: INFO nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Terminating instance [ 587.180525] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.180685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.181112] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.181255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 2e4eb0aca611456b98e622fc711e54d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.190421] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e4eb0aca611456b98e622fc711e54d9 [ 587.191486] env[62109]: DEBUG nova.network.neutron [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.197035] env[62109]: DEBUG nova.scheduler.client.report [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.199450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg c74177bef0ae41828ccaedd9e5cf5b41 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.217848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c74177bef0ae41828ccaedd9e5cf5b41 [ 587.264719] env[62109]: DEBUG nova.network.neutron [-] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.265212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3cc8978954644282bfa7322774f34444 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.275116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cc8978954644282bfa7322774f34444 [ 587.322423] env[62109]: DEBUG nova.network.neutron [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.323007] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] Expecting reply to msg 53767e75213c492db88e0d97448132c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.332490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53767e75213c492db88e0d97448132c4 [ 587.352742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-22179940-4e5b-4879-be19-a9addb0a628c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.352995] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 587.353181] env[62109]: DEBUG nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 587.353357] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 587.380702] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.380702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1e82e3caf9694bc093df57ce0746867c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.387213] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e82e3caf9694bc093df57ce0746867c [ 587.701944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.932s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.702652] env[62109]: ERROR nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. 
[ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Traceback (most recent call last): [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.driver.spawn(context, instance, image_meta, [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] vm_ref = self.build_virtual_machine(instance, [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.702652] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] for vif in network_info: [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self._sync_wrapper(fn, *args, **kwargs) [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.wait() [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self[:] = self._gt.wait() [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self._exit_event.wait() [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] result = hub.switch() [ 587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
587.703019] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return self.greenlet.switch() [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] result = function(*args, **kwargs) [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] return func(*args, **kwargs) [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise e [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] nwinfo = self.network_api.allocate_for_instance( [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] created_port_ids = self._update_ports_for_instance( [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] with excutils.save_and_reraise_exception(): [ 587.703361] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] self.force_reraise() [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise self.value [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] updated_port = self._update_port( [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] _ensure_no_port_binding_failure(port) [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] raise exception.PortBindingFailed(port_id=port['id']) [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] nova.exception.PortBindingFailed: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. [ 587.703769] env[62109]: ERROR nova.compute.manager [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] [ 587.704535] env[62109]: DEBUG nova.compute.utils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 587.704600] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.099s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.706881] env[62109]: INFO nova.compute.claims [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.708125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg a22f0ebc3dd94317858e5943d571c6d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.709378] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Build of instance 15dc4e5a-da5b-4657-8aec-f501d35d7a58 was re-scheduled: Binding failed for port 38c57439-2b4f-48ef-884c-f248749447a4, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 587.709951] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 587.710104] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquiring lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.710280] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Acquired lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.710471] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.710867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg d8c4813a7ebd4bb2b26372fd34b01858 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.719507] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.719622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8c4813a7ebd4bb2b26372fd34b01858 [ 587.751958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a22f0ebc3dd94317858e5943d571c6d8 [ 587.767798] env[62109]: INFO nova.compute.manager [-] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Took 1.07 seconds to deallocate network for instance. 
[ 587.769972] env[62109]: DEBUG nova.compute.claims [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 587.770177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.825307] env[62109]: DEBUG oslo_concurrency.lockutils [req-3f47eaeb-3678-48a7-9850-43757d6764d1 req-a8715ecb-514c-4e32-906e-80df32343b23 service nova] Releasing lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.882605] env[62109]: DEBUG nova.network.neutron [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.883161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg df8717b3fe9440989552e8c2f061507a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.893896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df8717b3fe9440989552e8c2f061507a [ 587.939821] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.940371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 167d988533814321982ed1a4cfbbfae7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 587.948655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 167d988533814321982ed1a4cfbbfae7 [ 588.219088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 2b06dd7ac790464c97a1b422676e0d29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.228346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b06dd7ac790464c97a1b422676e0d29 [ 588.251840] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.365507] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.366051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 075fc12a137948189892beeebcc1f5f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.375743] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 075fc12a137948189892beeebcc1f5f9 [ 588.385284] env[62109]: INFO nova.compute.manager [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 22179940-4e5b-4879-be19-a9addb0a628c] Took 1.03 seconds to deallocate network for instance. [ 588.386988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 738a11306e1e4c66bd5e15efe8dd539e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.438478] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 738a11306e1e4c66bd5e15efe8dd539e [ 588.442744] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.443161] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 588.443349] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 588.443964] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2608e39e-6b16-401f-9434-b9a8c8ae51af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.452970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a03530-8953-4f0b-8eb8-6737fdc22ebc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.470392] env[62109]: ERROR nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. [ 588.470392] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.470392] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.470392] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.470392] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.470392] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.470392] env[62109]: ERROR nova.compute.manager raise self.value [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.470392] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 588.470392] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.470392] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 588.470935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.470935] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 588.470935] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. 
[ 588.470935] env[62109]: ERROR nova.compute.manager [ 588.470935] env[62109]: Traceback (most recent call last): [ 588.470935] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 588.470935] env[62109]: listener.cb(fileno) [ 588.470935] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.470935] env[62109]: result = function(*args, **kwargs) [ 588.470935] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.470935] env[62109]: return func(*args, **kwargs) [ 588.470935] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.470935] env[62109]: raise e [ 588.470935] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.470935] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 588.470935] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.470935] env[62109]: created_port_ids = self._update_ports_for_instance( [ 588.470935] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.470935] env[62109]: with excutils.save_and_reraise_exception(): [ 588.470935] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.470935] env[62109]: self.force_reraise() [ 588.470935] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.470935] env[62109]: raise self.value [ 588.470935] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.470935] env[62109]: updated_port = self._update_port( [ 588.470935] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.470935] env[62109]: _ensure_no_port_binding_failure(port) [ 588.470935] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.470935] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 588.471782] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. [ 588.471782] env[62109]: Removing descriptor: 19 [ 588.471782] env[62109]: ERROR nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. 
[ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Traceback (most recent call last): [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] yield resources [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.driver.spawn(context, instance, image_meta, [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.471782] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] vm_ref = self.build_virtual_machine(instance, [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] for vif in network_info: [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self._sync_wrapper(fn, *args, **kwargs) [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.wait() [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self[:] = self._gt.wait() [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self._exit_event.wait() [ 588.472650] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.473016] env[62109]: ERROR 
nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] result = hub.switch() [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self.greenlet.switch() [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] result = function(*args, **kwargs) [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return func(*args, **kwargs) [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise e [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] nwinfo = self.network_api.allocate_for_instance( [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.473016] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] created_port_ids = self._update_ports_for_instance( [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] with excutils.save_and_reraise_exception(): [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.force_reraise() [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise self.value [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] updated_port = self._update_port( [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.473386] 
env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] _ensure_no_port_binding_failure(port) [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.473386] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise exception.PortBindingFailed(port_id=port['id']) [ 588.473704] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. [ 588.473704] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] [ 588.473704] env[62109]: INFO nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Terminating instance [ 588.475598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquiring lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.475832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquired lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.476450] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.476577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 2d228d2b422545d78fc0bf9f05e19981 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.487262] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c742fcf9-ac27-4a04-81a2-d99741dba794 could not be found. 
[ 588.487470] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 588.487643] env[62109]: INFO nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Took 0.04 seconds to destroy the instance on the hypervisor. [ 588.488013] env[62109]: DEBUG oslo.service.loopingcall [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.488549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d228d2b422545d78fc0bf9f05e19981 [ 588.488951] env[62109]: DEBUG nova.compute.manager [-] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.489050] env[62109]: DEBUG nova.network.neutron [-] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.572918] env[62109]: DEBUG nova.network.neutron [-] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.573471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cfc1f03abc51445c9418c0a665de22bc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.580740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfc1f03abc51445c9418c0a665de22bc [ 588.804120] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquiring lock "3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.804399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.808630] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.808893] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.868446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Releasing lock "refresh_cache-15dc4e5a-da5b-4657-8aec-f501d35d7a58" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.868728] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 588.868970] env[62109]: DEBUG nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.869115] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.891890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 276821c0fed34f608b8effcfd4c9d32e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.916223] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.916930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg 3075094b4b5643df8f7c7b70b310972b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 588.930409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3075094b4b5643df8f7c7b70b310972b [ 588.931638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 276821c0fed34f608b8effcfd4c9d32e [ 589.029797] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.075197] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96fe375-3ee1-4369-a870-494709ed8dab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.077955] env[62109]: DEBUG nova.network.neutron [-] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.078379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 93937f0e7e3a4c5996c1248d415c51a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.086958] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd17a24d-2b1d-47ac-b378-96187414ee13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.133490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93937f0e7e3a4c5996c1248d415c51a2 [ 589.140435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0283c7f-e2cd-4711-8fc4-b483884455a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.150996] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b51d0d-cef5-4fa3-84f4-9578016c5dff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.166544] env[62109]: DEBUG nova.compute.provider_tree [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.166823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 2238ce5620bc4d97aa5b55fd9f72a848 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.185671] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 2238ce5620bc4d97aa5b55fd9f72a848 [ 589.205491] env[62109]: DEBUG nova.compute.manager [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Received event network-changed-5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.205932] env[62109]: DEBUG nova.compute.manager [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Refreshing instance network info cache due to event network-changed-5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 589.205932] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] Acquiring lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.236868] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.237380] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 1662248c6cfa40838c010e2e215001ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.247813] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1662248c6cfa40838c010e2e215001ef [ 589.420347] env[62109]: INFO nova.scheduler.client.report [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Deleted allocations for instance 22179940-4e5b-4879-be19-a9addb0a628c [ 589.428377] env[62109]: DEBUG nova.network.neutron [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.428899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg af53e9d0f4f14a3ca7e630c9b3c48a51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.430096] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg f4e79b2191904162b51197fce577a115 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.441642] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af53e9d0f4f14a3ca7e630c9b3c48a51 [ 589.458950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4e79b2191904162b51197fce577a115 [ 
589.583866] env[62109]: INFO nova.compute.manager [-] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Took 1.09 seconds to deallocate network for instance. [ 589.585118] env[62109]: DEBUG nova.compute.claims [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 589.585118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.669516] env[62109]: DEBUG nova.scheduler.client.report [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.671980] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 7172541be967449db040efe6b1777c60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.684770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7172541be967449db040efe6b1777c60 [ 589.694116] env[62109]: DEBUG nova.compute.manager [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Received event network-vif-deleted-636f6a96-849f-411e-a602-2d048aa9867a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.694364] env[62109]: DEBUG nova.compute.manager [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Received event network-changed-4bf704f8-170e-48fa-9d73-18fa03b4afe7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 589.694468] env[62109]: DEBUG nova.compute.manager [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Refreshing instance network info cache due to event network-changed-4bf704f8-170e-48fa-9d73-18fa03b4afe7. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 589.694665] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] Acquiring lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.694794] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] Acquired lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.694941] env[62109]: DEBUG nova.network.neutron [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Refreshing network info cache for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 589.695361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] Expecting reply to msg 985cf9e489b8470e983efa25a0290f04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.703128] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 985cf9e489b8470e983efa25a0290f04 [ 589.744106] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Releasing lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.744106] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 589.744106] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 589.744106] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] Acquired lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.744106] env[62109]: DEBUG nova.network.neutron [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Refreshing network info cache for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 589.744971] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] Expecting reply to msg 1cbe945df2874f87a4f92ef1e323eab7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.744971] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e93b4d7-40d0-4029-9fda-28bb9147728a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.752095] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bbcfe5-e978-4a52-895e-5f340767db73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.769555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cbe945df2874f87a4f92ef1e323eab7 [ 589.784111] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d24eec8f-565a-4a02-834c-267e633ebb12 could not be found. [ 589.784368] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 589.784543] env[62109]: INFO nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Took 0.04 seconds to destroy the instance on the hypervisor. [ 589.784780] env[62109]: DEBUG oslo.service.loopingcall [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.784998] env[62109]: DEBUG nova.compute.manager [-] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 589.785084] env[62109]: DEBUG nova.network.neutron [-] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 589.813687] env[62109]: DEBUG nova.network.neutron [-] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.813687] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1b95948aeb5f47a78437f9e8808a60b9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.821156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b95948aeb5f47a78437f9e8808a60b9 [ 589.931223] env[62109]: INFO nova.compute.manager [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] [instance: 15dc4e5a-da5b-4657-8aec-f501d35d7a58] Took 1.06 seconds to deallocate network for instance. [ 589.933050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg e3dc166d2e9f4368b7d4e0c744eba2b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.934168] env[62109]: DEBUG oslo_concurrency.lockutils [None req-640de2c7-753c-4d78-b3be-95d920cb7fe3 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "22179940-4e5b-4879-be19-a9addb0a628c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.173s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.934891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 03864f4af913471a9a878570bf3a9ad6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 589.952328] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03864f4af913471a9a878570bf3a9ad6 [ 589.988714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3dc166d2e9f4368b7d4e0c744eba2b8 [ 590.040431] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquiring lock "80a15f60-4843-4a59-a6c1-0d5624609672" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.040823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "80a15f60-4843-4a59-a6c1-0d5624609672" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.174652] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.175209] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 590.177007] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg e2be1a61feb54f059446ef37e84c425e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.177987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.515s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.184264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 31b835a6503f4038a70f1fe929d15cc8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.242345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2be1a61feb54f059446ef37e84c425e [ 590.243568] env[62109]: DEBUG nova.network.neutron [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.258635] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31b835a6503f4038a70f1fe929d15cc8 [ 590.272172] env[62109]: DEBUG nova.network.neutron [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.315483] env[62109]: DEBUG nova.network.neutron [-] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.316252] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e095b355ddba468dabd4f1830e3c2059 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.329349] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e095b355ddba468dabd4f1830e3c2059 [ 590.398171] env[62109]: DEBUG nova.network.neutron [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.398696] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] Expecting reply to msg c40bd889f48f45c28b17e7bd98a16d1c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.409060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c40bd889f48f45c28b17e7bd98a16d1c [ 590.414202] env[62109]: DEBUG nova.network.neutron [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.414716] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] Expecting reply to msg c6e9fcc6c5f64938adea03dfadc1ab97 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.431155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e9fcc6c5f64938adea03dfadc1ab97 [ 590.437629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg a9070c7013a54b7ebf1773b9da6ec3fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.442976] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 590.442976] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg b14ac9060124494899b7ac02fd2d4744 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.483385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9070c7013a54b7ebf1773b9da6ec3fe [ 590.502373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b14ac9060124494899b7ac02fd2d4744 [ 590.685956] env[62109]: DEBUG nova.compute.utils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.686588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 440877deddfd4f38bb033bac20b6d7df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.697547] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 590.697874] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.707579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 440877deddfd4f38bb033bac20b6d7df [ 590.819047] env[62109]: INFO nova.compute.manager [-] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Took 1.03 seconds to deallocate network for instance. 
[ 590.822624] env[62109]: DEBUG nova.compute.claims [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 590.822624] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.833718] env[62109]: DEBUG nova.policy [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d76306b5db04e97a1c3e28ffdc73d78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c9199b442e5466390387d8884f8bc96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.901639] env[62109]: DEBUG oslo_concurrency.lockutils [req-f0b4556f-5a2e-48d5-a091-8183ca242c7e req-895ceefe-9a1a-42b4-ba44-96f488075708 service nova] Releasing lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.922508] env[62109]: DEBUG oslo_concurrency.lockutils [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] Releasing lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.922781] env[62109]: DEBUG nova.compute.manager [req-bb26f5e0-41bd-44ed-a2ff-edd8f2f17db5 req-9e05e479-8323-4bd0-8475-a541f70ccbf2 service nova] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Received event network-vif-deleted-4bf704f8-170e-48fa-9d73-18fa03b4afe7 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 590.961788] env[62109]: INFO nova.scheduler.client.report [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Deleted allocations for instance 15dc4e5a-da5b-4657-8aec-f501d35d7a58 [ 590.969022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Expecting reply to msg cfa2023cd2f64995863f4576f1678584 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 590.983154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfa2023cd2f64995863f4576f1678584 [ 590.992823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.060062] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5f8dad-a3d4-4cd4-a29c-83891ed1fd40 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.066414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e414300c-23f7-4a6a-b6c3-dbe28b9dc811 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.102877] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ac240c-f8df-4270-bcca-1e2dc3cbc310 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.114105] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a322513-dfc7-4e38-9aac-051f46808305 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.127418] env[62109]: DEBUG nova.compute.provider_tree [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.127956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg ef47409f9f2f46ee8541f1175bb4c362 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 591.136774] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef47409f9f2f46ee8541f1175bb4c362 [ 591.198663] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.200411] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 7194dc40f10a43bab1e5374f3fb5700f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 591.261927] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7194dc40f10a43bab1e5374f3fb5700f [ 591.473580] env[62109]: DEBUG oslo_concurrency.lockutils [None req-2936a47b-0996-4d83-bf82-1b2c3804d4cd tempest-ServersAdminTestJSON-1198871778 tempest-ServersAdminTestJSON-1198871778-project-member] Lock "15dc4e5a-da5b-4657-8aec-f501d35d7a58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.701s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.474100] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 288441f026cb4994bd50752027a32935 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 591.489026] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 288441f026cb4994bd50752027a32935 [ 591.631686] env[62109]: DEBUG nova.scheduler.client.report [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 591.634222] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg b8026235f3a648b38040361265577146 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 591.641126] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Successfully created port: c9378bce-31f2-4b1a-a99e-35f01300a701 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.646744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8026235f3a648b38040361265577146 [ 591.706108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 99dea6d55b2242f68ab39ff99c64e3c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 591.736458] env[62109]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 99dea6d55b2242f68ab39ff99c64e3c2 [ 591.976575] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 591.978317] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 03c6a828a13e48b2b198c87b168bf636 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 592.016391] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03c6a828a13e48b2b198c87b168bf636 [ 592.137627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.960s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.138346] env[62109]: ERROR nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Traceback (most recent call last): [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.driver.spawn(context, instance, image_meta, [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] vm_ref = self.build_virtual_machine(instance, [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.138346] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] for vif in network_info: [ 
592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self._sync_wrapper(fn, *args, **kwargs) [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.wait() [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self[:] = self._gt.wait() [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self._exit_event.wait() [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] result = hub.switch() [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 592.138702] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return self.greenlet.switch() [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] result = function(*args, **kwargs) [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] return func(*args, **kwargs) [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise e [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] nwinfo = self.network_api.allocate_for_instance( [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] created_port_ids = self._update_ports_for_instance( [ 592.139071] env[62109]: ERROR 
nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] with excutils.save_and_reraise_exception(): [ 592.139071] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] self.force_reraise() [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise self.value [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] updated_port = self._update_port( [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] _ensure_no_port_binding_failure(port) [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] raise exception.PortBindingFailed(port_id=port['id']) [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] nova.exception.PortBindingFailed: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. [ 592.139415] env[62109]: ERROR nova.compute.manager [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] [ 592.139852] env[62109]: DEBUG nova.compute.utils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 592.140334] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.363s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.142220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 294dcec78ad74b4fbedaeaa17fd41253 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 592.143596] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Build of instance 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7 was re-scheduled: Binding failed for port 228e8a13-db93-4bfa-b8bc-ab5292e785b1, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 592.143893] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 592.144499] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquiring lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.144692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Acquired lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.144884] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.145282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 4940b440ee874639a75e21789ee91cf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 592.153624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4940b440ee874639a75e21789ee91cf7 [ 592.175056] env[62109]: DEBUG nova.compute.manager [req-854035bf-58a7-4a1c-a540-63c2ffe9c624 req-dfb2e7c8-e8ea-4204-be54-564b2a5a20b5 service 
nova] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Received event network-vif-deleted-5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 592.207567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 294dcec78ad74b4fbedaeaa17fd41253 [ 592.211527] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 592.241646] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.241914] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.242059] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.242237] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.242420] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.242574] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.242784] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.242933] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.243164] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.243393] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.243532] env[62109]: DEBUG nova.virt.hardware [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.244941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac553fb-2ed1-4f5b-bfb1-54a4043eb402 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.256042] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fc4a00-3e9b-4490-8925-5de110fc6340 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.510565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.712601] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.955266] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f38c74a-cf80-4a8b-9a32-ac22cef15a3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.963260] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14931cdc-b211-4eef-8e36-d95095d0b37b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.994991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495da24f-7b78-4e6e-8b1f-8eb79c8bd33d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.003093] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48fa029-d0d8-4cdc-8e85-9ac1c5d83277 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.019400] env[62109]: DEBUG nova.compute.provider_tree [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.020038] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 9fe63e863af247b998a891503a8334e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 593.031719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fe63e863af247b998a891503a8334e0 [ 593.056225] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.056799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 7637148ce46145d39c3730ba10ce6010 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 593.067794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7637148ce46145d39c3730ba10ce6010 [ 593.173726] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquiring lock "15e2e743-070f-4545-b976-ced38fd99198" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.174020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 
tempest-ServerExternalEventsTest-1156907895-project-member] Lock "15e2e743-070f-4545-b976-ced38fd99198" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.526127] env[62109]: DEBUG nova.scheduler.client.report [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 593.526127] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 7943059fc9514499ab8df36c0982b6cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 593.545367] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7943059fc9514499ab8df36c0982b6cc [ 593.561980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Releasing lock "refresh_cache-5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.561980] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 593.561980] env[62109]: DEBUG nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 593.561980] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 593.655063] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.655809] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg d82375a5b99249889e3d25530e33d571 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 593.663585] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d82375a5b99249889e3d25530e33d571 [ 594.030472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.889s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.030472] env[62109]: ERROR nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Traceback (most recent call last): [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.driver.spawn(context, instance, image_meta, [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.030472] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] vm_ref = self.build_virtual_machine(instance, [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] for vif in network_info: [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self._sync_wrapper(fn, *args, 
**kwargs) [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.wait() [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self[:] = self._gt.wait() [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self._exit_event.wait() [ 594.030963] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] result = hub.switch() [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return self.greenlet.switch() [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] result = function(*args, **kwargs) [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] return func(*args, **kwargs) [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise e [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] nwinfo = self.network_api.allocate_for_instance( [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 594.031314] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] created_port_ids = self._update_ports_for_instance( [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] with excutils.save_and_reraise_exception(): [ 594.031674] 
env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] self.force_reraise() [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise self.value [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] updated_port = self._update_port( [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] _ensure_no_port_binding_failure(port) [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.031674] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] raise exception.PortBindingFailed(port_id=port['id']) [ 594.032063] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] nova.exception.PortBindingFailed: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. [ 594.032063] env[62109]: ERROR nova.compute.manager [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] [ 594.032063] env[62109]: DEBUG nova.compute.utils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 594.032435] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.481s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.033831] env[62109]: INFO nova.compute.claims [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.035421] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 3c488204b7cb48c991af35992907a7c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.039714] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Build of instance 2a473614-2051-47ab-a9bc-f87385a264cd was re-scheduled: Binding failed for port ebefe3f7-1348-4203-ab33-a289a4fa30de, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 594.039714] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 594.039714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquiring lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.039714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Acquired lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.039973] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 594.039973] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 
dc0e409d38af4b0e9110902333e4d346 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.046062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc0e409d38af4b0e9110902333e4d346 [ 594.088532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c488204b7cb48c991af35992907a7c9 [ 594.157912] env[62109]: DEBUG nova.network.neutron [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.158513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 1ac0a7e7db9c475ca028e3feeac33f92 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.177945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ac0a7e7db9c475ca028e3feeac33f92 [ 594.276682] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquiring lock "2fa640c2-b433-4581-be4b-0673c1451043" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.276897] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "2fa640c2-b433-4581-be4b-0673c1451043" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.482161] env[62109]: ERROR nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. 
[ 594.482161] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.482161] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 594.482161] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 594.482161] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.482161] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.482161] env[62109]: ERROR nova.compute.manager raise self.value [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 594.482161] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 594.482161] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.482161] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 594.482673] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.482673] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 594.482673] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. 
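Every traceback in this section bottoms out in _ensure_no_port_binding_failure() at nova/network/neutron.py:294. As a minimal sketch (not the verbatim Nova source), the check amounts to inspecting the 'binding:vif_type' attribute Neutron returns on the port; a stand-in exception class is used here instead of nova.exception:

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        pass

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with binding:vif_type set to
        # 'binding_failed'; Nova converts that into the exception seen above.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port['id'])

So the failures for ports ebefe3f7-1348-4203-ab33-a289a4fa30de and c9378bce-31f2-4b1a-a99e-35f01300a701 mean Neutron never produced a usable binding for those ports; the compute side cannot proceed until that is resolved, which is why the affected builds are re-scheduled.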
[ 594.482673] env[62109]: ERROR nova.compute.manager [ 594.482673] env[62109]: Traceback (most recent call last): [ 594.482673] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 594.482673] env[62109]: listener.cb(fileno) [ 594.482673] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.482673] env[62109]: result = function(*args, **kwargs) [ 594.482673] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 594.482673] env[62109]: return func(*args, **kwargs) [ 594.482673] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.482673] env[62109]: raise e [ 594.482673] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.482673] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 594.482673] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 594.482673] env[62109]: created_port_ids = self._update_ports_for_instance( [ 594.482673] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 594.482673] env[62109]: with excutils.save_and_reraise_exception(): [ 594.482673] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.482673] env[62109]: self.force_reraise() [ 594.482673] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.482673] env[62109]: raise self.value [ 594.482673] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 594.482673] env[62109]: updated_port = self._update_port( [ 594.482673] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.482673] env[62109]: _ensure_no_port_binding_failure(port) [ 594.482673] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.482673] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 594.483481] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. [ 594.483481] env[62109]: Removing descriptor: 19 [ 594.483481] env[62109]: ERROR nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. 
[ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Traceback (most recent call last): [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] yield resources [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.driver.spawn(context, instance, image_meta, [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.483481] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] vm_ref = self.build_virtual_machine(instance, [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] for vif in network_info: [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self._sync_wrapper(fn, *args, **kwargs) [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.wait() [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self[:] = self._gt.wait() [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self._exit_event.wait() [ 594.483894] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.484502] env[62109]: ERROR 
nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] result = hub.switch() [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self.greenlet.switch() [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] result = function(*args, **kwargs) [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return func(*args, **kwargs) [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise e [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] nwinfo = self.network_api.allocate_for_instance( [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 594.484502] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] created_port_ids = self._update_ports_for_instance( [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] with excutils.save_and_reraise_exception(): [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.force_reraise() [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise self.value [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] updated_port = self._update_port( [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.484913] 
env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] _ensure_no_port_binding_failure(port) [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.484913] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise exception.PortBindingFailed(port_id=port['id']) [ 594.485259] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. [ 594.485259] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] [ 594.485259] env[62109]: INFO nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Terminating instance [ 594.486177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquiring lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.486327] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquired lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.486485] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 594.487139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg cd84f56078c641b684fb22b85a11b1f6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.494801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd84f56078c641b684fb22b85a11b1f6 [ 594.541787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg af0d65dd071740f6bf6e04b1491f53e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.554363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af0d65dd071740f6bf6e04b1491f53e9 [ 594.603114] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 
2a473614-2051-47ab-a9bc-f87385a264cd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.661320] env[62109]: INFO nova.compute.manager [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] [instance: 5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7] Took 1.10 seconds to deallocate network for instance. [ 594.663326] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 21ccb7c9dbf24bcd993642c0a8632ee8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.718825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ccb7c9dbf24bcd993642c0a8632ee8 [ 594.723409] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.724022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg d172635fc22a4fcd8bddd3859ad2fc7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 594.740225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d172635fc22a4fcd8bddd3859ad2fc7b [ 595.132078] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.168394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg c70a8a2b7eca4e95b6eb9429970a27e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.225569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c70a8a2b7eca4e95b6eb9429970a27e2 [ 595.226292] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Releasing lock "refresh_cache-2a473614-2051-47ab-a9bc-f87385a264cd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.226501] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
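Two library patterns show up in the tracebacks above. The save_and_reraise_exception()/force_reraise() frames are the oslo.utils idiom for running cleanup while preserving the original exception, and the eventlet frames (hubs/poll.py, greenthread.py) reflect that network allocation runs in a green thread whose result is only collected when the VMware spawn path iterates the VIFs (get_vif_info -> __iter__ -> wait). A rough, self-contained sketch of that shape, with illustrative helper names rather than Nova's exact code:

    import eventlet
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def update_port(port_id):
        # Stand-in for the Neutron port update that fails in the log.
        raise PortBindingFailed('Binding failed for port %s' % port_id)

    def allocate_network_async(port_id):
        # Rough shape of _allocate_network_async / _update_ports_for_instance:
        # cleanup runs inside save_and_reraise_exception(), then the saved
        # exception is re-raised (the force_reraise() / raise self.value
        # frames in the tracebacks above).
        try:
            return update_port(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                print('rolling back any ports created so far')

    gt = eventlet.spawn(allocate_network_async,
                        'c9378bce-31f2-4b1a-a99e-35f01300a701')
    # The build keeps going until the driver needs VIF details, then blocks on
    # the green thread; wait() re-raises PortBindingFailed at that point, which
    # is where the spawn traceback surfaces it.
    try:
        gt.wait()
    except PortBindingFailed as exc:
        print(exc)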
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 595.226677] env[62109]: DEBUG nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 595.226836] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 595.310061] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.310643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 0fdfbb62ae8149298bb9c3fcf9cb743a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.336992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fdfbb62ae8149298bb9c3fcf9cb743a [ 595.372475] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.372475] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 94808ad6bcc14d478f2e9d6328913667 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.384721] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94808ad6bcc14d478f2e9d6328913667 [ 595.424925] env[62109]: DEBUG nova.compute.manager [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Received event network-changed-c9378bce-31f2-4b1a-a99e-35f01300a701 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 595.425230] env[62109]: DEBUG nova.compute.manager [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Refreshing instance network info cache due to event network-changed-c9378bce-31f2-4b1a-a99e-35f01300a701. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 595.425334] env[62109]: DEBUG oslo_concurrency.lockutils [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] Acquiring lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.481554] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f569033-800b-4170-8a3a-e1d4f6d71541 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.492796] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7934e9d-4d79-47a6-8424-c77905fe0dd0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.539742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbd2d76-7a2b-4786-afb5-dfc471d098fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.544124] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "535045d9-108e-4e88-82f0-9da98f2f55a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.544506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "535045d9-108e-4e88-82f0-9da98f2f55a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.550289] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dbd969-47b4-4e39-99e2-209327ede885 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.569032] env[62109]: DEBUG nova.compute.provider_tree [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.569644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 72f6bf4be8f14d269845ac3584a31fca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.579373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72f6bf4be8f14d269845ac3584a31fca [ 595.696450] env[62109]: INFO nova.scheduler.client.report [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Deleted allocations for instance 
5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7 [ 595.703507] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Expecting reply to msg 4205e5a481dd4243b0c98276004027ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.722563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4205e5a481dd4243b0c98276004027ba [ 595.813915] env[62109]: DEBUG nova.network.neutron [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.814560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 0c2f987a788e4f3a8ab4a872fbfa15d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.833450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2f987a788e4f3a8ab4a872fbfa15d4 [ 595.874790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Releasing lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.875288] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 595.875426] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 595.875761] env[62109]: DEBUG oslo_concurrency.lockutils [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] Acquired lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.875931] env[62109]: DEBUG nova.network.neutron [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Refreshing network info cache for port c9378bce-31f2-4b1a-a99e-35f01300a701 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 595.876405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] Expecting reply to msg a684afe9b9b04b37a43c498378db867c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.877322] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb0f9370-349e-4e17-b0a1-d09f2c53f085 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.886727] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a684afe9b9b04b37a43c498378db867c [ 595.889737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7490c45-1da9-4483-85e6-ba22280d0dc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.917172] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39901fc8-8fc8-4812-936e-0ded3811d61c could not be found. [ 595.917927] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 595.918745] env[62109]: INFO nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Took 0.04 seconds to destroy the instance on the hypervisor. 
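The lock tracing in these lines comes from oslo.concurrency. The "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG messages (lockutils.py:310/313/331) are the lock() context manager, while the "acquired by ... waited N.NNNs" / "released ... held N.NNNs" messages (lockutils.py:402/407/421) come from the synchronized decorator's wrapper. A minimal sketch of both forms, with illustrative lock names and empty bodies:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid):
        # Emits the "Acquiring lock ... / Acquired lock ... / Releasing lock ..."
        # lines seen for the refresh_cache-<uuid> locks above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache under the lock

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Emits the 'Lock "compute_resources" acquired by ... waited N.NNNs'
        # and '... released ... held N.NNNs' lines.
        pass

The 12.481s and 12.811s waits recorded on "compute_resources" in this section are simply other build requests queued behind the same resource-tracker lock.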
[ 595.919377] env[62109]: DEBUG oslo.service.loopingcall [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.919719] env[62109]: DEBUG nova.compute.manager [-] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 595.920029] env[62109]: DEBUG nova.network.neutron [-] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 595.947168] env[62109]: DEBUG nova.network.neutron [-] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.948726] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 05e3ba2e7af64d71a6d3c3af7b09da29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 595.956138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05e3ba2e7af64d71a6d3c3af7b09da29 [ 596.076285] env[62109]: DEBUG nova.scheduler.client.report [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 596.079282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 1e75508468e14013a659e8247ec43f39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.091422] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e75508468e14013a659e8247ec43f39 [ 596.208037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d4e425a3-dfe5-4f27-b4c2-e8ecf97207b5 tempest-VolumesAssistedSnapshotsTest-857465433 tempest-VolumesAssistedSnapshotsTest-857465433-project-member] Lock "5bde6403-4d82-4f8d-a3fb-fa4e75f0aaf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.015s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.208641] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 5cdef523b21c44a8a24e9e3961b49995 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.218848] env[62109]: DEBUG nova.compute.manager [None 
req-a7234f82-99ff-4714-b909-a4cd05d145e5 tempest-ServerDiagnosticsV248Test-86897178 tempest-ServerDiagnosticsV248Test-86897178-project-admin] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 596.219286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c6b5ca-3e75-4dd2-a18c-8c9ee876075a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.222316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cdef523b21c44a8a24e9e3961b49995 [ 596.227019] env[62109]: INFO nova.compute.manager [None req-a7234f82-99ff-4714-b909-a4cd05d145e5 tempest-ServerDiagnosticsV248Test-86897178 tempest-ServerDiagnosticsV248Test-86897178-project-admin] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Retrieving diagnostics [ 596.227983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dcd3f6-0308-436a-a02b-937abd145e16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.318672] env[62109]: INFO nova.compute.manager [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] [instance: 2a473614-2051-47ab-a9bc-f87385a264cd] Took 1.09 seconds to deallocate network for instance. [ 596.320640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 3dc0a6d339b7484dbbb190ac8af5c333 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.358991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dc0a6d339b7484dbbb190ac8af5c333 [ 596.407482] env[62109]: DEBUG nova.network.neutron [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.450784] env[62109]: DEBUG nova.network.neutron [-] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.451268] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4dc8d0b1a6104285ab3e84bfbba05121 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.459269] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dc8d0b1a6104285ab3e84bfbba05121 [ 596.543388] env[62109]: DEBUG nova.network.neutron [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.543916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] Expecting reply to msg 5194164c32af483daf4c75c386f95019 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.553549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5194164c32af483daf4c75c386f95019 [ 596.582644] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.583178] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 596.584923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg a587af8282df4611acef945381858853 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.589379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.811s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.589379] env[62109]: DEBUG nova.objects.instance [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lazy-loading 'resources' on Instance uuid 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 596.589379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg a3f8a9aa1c4e496b96453ba0bebcbec0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.602425] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3f8a9aa1c4e496b96453ba0bebcbec0 [ 596.649872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a587af8282df4611acef945381858853 [ 596.925287] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 596.926520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg a1404f8571d54c80bd4c02667a4b9c95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.938739] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 89f447509f314346afd7b18a6884b9d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 596.954354] env[62109]: INFO nova.compute.manager [-] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Took 1.03 seconds to deallocate network for instance. 
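The scheduler report lines repeat the full inventory record for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. To a first approximation, placement treats usable capacity per resource class as (total - reserved) * allocation_ratio; plugging in the values from the record logged above:

    # Usable capacity per resource class, from the inventory data logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With max_unit capped at 16 VCPUs, 65530 MB and 124 GB per allocation, the small tempest flavors claimed in this section fit easily, which is consistent with the "Claim successful on node domain-c8..." lines: the failures here are all on the Neutron port-binding side, not on resources.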
[ 596.956670] env[62109]: DEBUG nova.compute.claims [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 596.956847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.985443] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1404f8571d54c80bd4c02667a4b9c95 [ 597.018066] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89f447509f314346afd7b18a6884b9d6 [ 597.046051] env[62109]: DEBUG oslo_concurrency.lockutils [req-599ae125-4da3-4efb-b959-1662f13638ce req-cb2d6ac2-871d-45ec-b7ec-70f0a1c081be service nova] Releasing lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.090283] env[62109]: DEBUG nova.compute.utils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 597.090915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 4814d8d0ce1548ac89a99f2165d3f227 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 597.099130] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 597.099439] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 597.106373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4814d8d0ce1548ac89a99f2165d3f227 [ 597.300365] env[62109]: DEBUG nova.policy [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f67ad126d9354c9e9cd8ac79b829fb4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0e5d54143934f498a4a7849ec99a645', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 597.401684] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039cf048-d013-4357-a282-07082abd3e5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.411518] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd4335b-5745-49c5-83a3-3152dcbce905 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.444526] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1072d6-8cd2-49ee-b886-27f5e1e84a8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.463926] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938dba6f-d969-4300-820a-eb9d2c8ee003 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.468175] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.477785] env[62109]: DEBUG nova.compute.provider_tree [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.478349] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 874d5667429441deb36acf372830d212 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 597.479890] env[62109]: INFO 
nova.scheduler.client.report [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Deleted allocations for instance 2a473614-2051-47ab-a9bc-f87385a264cd [ 597.486769] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Expecting reply to msg 3e645aef17164f5a9ed021b7ba5ed490 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 597.488201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 874d5667429441deb36acf372830d212 [ 597.511415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e645aef17164f5a9ed021b7ba5ed490 [ 597.600066] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 597.601782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 5978edd9b78c482ebc00f2f947be01f6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 597.643616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5978edd9b78c482ebc00f2f947be01f6 [ 597.851564] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Successfully created port: f8b581d8-499a-4fb9-841d-9a1f886b5fda {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.987299] env[62109]: DEBUG nova.scheduler.client.report [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.989903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg e5ac495428164805a6e96eb8bf82ffef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 597.994765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-48072dcd-56b4-4ba7-9d78-0fba124fc2c3 tempest-ServersWithSpecificFlavorTestJSON-787125351 tempest-ServersWithSpecificFlavorTestJSON-787125351-project-member] Lock "2a473614-2051-47ab-a9bc-f87385a264cd" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.162s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.994765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 92a75e42bc7145a2a9ba16c98066a780 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.004613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5ac495428164805a6e96eb8bf82ffef [ 598.006834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92a75e42bc7145a2a9ba16c98066a780 [ 598.106255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 132bb6e60ca14cb7ad16219b2b0eef6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.150033] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 132bb6e60ca14cb7ad16219b2b0eef6a [ 598.399610] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg b7eaf58a848f49fcb0a807e7ba3fb35c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.416741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7eaf58a848f49fcb0a807e7ba3fb35c [ 598.449271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "83a25ff9-cc7d-4917-95cc-e621884bcee8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.449271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "83a25ff9-cc7d-4917-95cc-e621884bcee8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.492785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.495512] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.143s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.497271] env[62109]: INFO nova.compute.claims 
[None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.499064] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 6711b37026b54d18af4103c95ff4b8e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.500043] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 598.501512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 0cd2316f755a443d931b74b026ffcab5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.520559] env[62109]: INFO nova.scheduler.client.report [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Deleted allocations for instance 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f [ 598.524172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 0d56f188623b4cc9979015e0d50b299a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.554613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd2316f755a443d931b74b026ffcab5 [ 598.573379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6711b37026b54d18af4103c95ff4b8e8 [ 598.601930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d56f188623b4cc9979015e0d50b299a [ 598.610051] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 598.630372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquiring lock "b537150e-9136-4fa4-b092-4f4995b918b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.630601] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "b537150e-9136-4fa4-b092-4f4995b918b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.638142] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 598.638380] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 598.638528] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.638697] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 598.638838] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.639021] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 
tempest-DeleteServersAdminTestJSON-911911890-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 598.639226] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 598.639375] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 598.639530] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 598.639684] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 598.639852] env[62109]: DEBUG nova.virt.hardware [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 598.641157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92ed595-71c1-4d73-8091-b485f20c5df6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.652326] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb95acd-2ffc-493f-9a25-42ec6b182298 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.810690] env[62109]: DEBUG nova.compute.manager [req-a544b5bc-1fa0-4e41-bc51-4157553864ac req-0b7606d9-571f-4b96-8aa0-1a3115f36ea7 service nova] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Received event network-vif-deleted-c9378bce-31f2-4b1a-a99e-35f01300a701 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 598.902442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.902442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 
tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.902579] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.903275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.903275] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.905197] env[62109]: INFO nova.compute.manager [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Terminating instance [ 598.907538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.907538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquired lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.907538] env[62109]: DEBUG nova.network.neutron [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.907908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 5f99ab598ac64caca6fa94e06732c80e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 598.914533] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 5f99ab598ac64caca6fa94e06732c80e [ 599.004469] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 592eeadd41094a7e9379987e13640792 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 599.022040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 592eeadd41094a7e9379987e13640792 [ 599.028458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Lock "7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.115s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.028896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-579c8955-f7a6-4f67-b5f5-2dbcfa54c32f tempest-ServersAaction247Test-1038962410 tempest-ServersAaction247Test-1038962410-project-member] Expecting reply to msg 1d40a8a5cad641788b07021d4b5eb65f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 599.032883] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.047658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d40a8a5cad641788b07021d4b5eb65f [ 599.165502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquiring lock "fe9756ba-0eb5-41ad-913f-e933f97542cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.165961] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "fe9756ba-0eb5-41ad-913f-e933f97542cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.445587] env[62109]: DEBUG nova.network.neutron [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.639861] env[62109]: ERROR nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. [ 599.639861] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.639861] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.639861] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.639861] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.639861] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.639861] env[62109]: ERROR nova.compute.manager raise self.value [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.639861] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 599.639861] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.639861] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 599.640387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.640387] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 599.640387] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. 
[ 599.640387] env[62109]: ERROR nova.compute.manager [ 599.640387] env[62109]: Traceback (most recent call last): [ 599.640387] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 599.640387] env[62109]: listener.cb(fileno) [ 599.640387] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.640387] env[62109]: result = function(*args, **kwargs) [ 599.640387] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 599.640387] env[62109]: return func(*args, **kwargs) [ 599.640387] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.640387] env[62109]: raise e [ 599.640387] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.640387] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 599.640387] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.640387] env[62109]: created_port_ids = self._update_ports_for_instance( [ 599.640387] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.640387] env[62109]: with excutils.save_and_reraise_exception(): [ 599.640387] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.640387] env[62109]: self.force_reraise() [ 599.640387] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.640387] env[62109]: raise self.value [ 599.640387] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.640387] env[62109]: updated_port = self._update_port( [ 599.640387] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.640387] env[62109]: _ensure_no_port_binding_failure(port) [ 599.640387] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.640387] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 599.641181] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. [ 599.641181] env[62109]: Removing descriptor: 19 [ 599.641181] env[62109]: ERROR nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. 
[ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Traceback (most recent call last): [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] yield resources [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.driver.spawn(context, instance, image_meta, [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 599.641181] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] vm_ref = self.build_virtual_machine(instance, [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] vif_infos = vmwarevif.get_vif_info(self._session, [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] for vif in network_info: [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self._sync_wrapper(fn, *args, **kwargs) [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.wait() [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self[:] = self._gt.wait() [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self._exit_event.wait() [ 599.641528] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.641885] env[62109]: ERROR 
nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] result = hub.switch() [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self.greenlet.switch() [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] result = function(*args, **kwargs) [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return func(*args, **kwargs) [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise e [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] nwinfo = self.network_api.allocate_for_instance( [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 599.641885] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] created_port_ids = self._update_ports_for_instance( [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] with excutils.save_and_reraise_exception(): [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.force_reraise() [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise self.value [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] updated_port = self._update_port( [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.642384] 
env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] _ensure_no_port_binding_failure(port) [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.642384] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise exception.PortBindingFailed(port_id=port['id']) [ 599.642740] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. [ 599.642740] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] [ 599.642740] env[62109]: INFO nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Terminating instance [ 599.643271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.643428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquired lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.643588] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 599.644141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg b6e5afeedb384ac486e97c6136e2db8b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 599.653666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6e5afeedb384ac486e97c6136e2db8b [ 599.837600] env[62109]: DEBUG nova.network.neutron [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.837600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 60f1d7adeaa647e09ac33364beb700ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 599.847575] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
60f1d7adeaa647e09ac33364beb700ba [ 599.849759] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b129b6-5db7-4613-b588-e00c058c438c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.858551] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99215354-85b6-4f33-8820-b26c475e9f8d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.894793] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ddf09a-d42d-4f9f-b782-94f0994b18ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.907806] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a45776-3165-47bb-9006-fe1b6731276c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.926719] env[62109]: DEBUG nova.compute.provider_tree [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.927260] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg bc001cc4a6ac44f1a70bd13f598b99f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 599.935908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc001cc4a6ac44f1a70bd13f598b99f2 [ 600.183332] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.339454] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.339983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 6bec35eedf65412baff095d0e2ccb77e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 600.340979] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Releasing lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.341349] env[62109]: DEBUG nova.compute.manager [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.341535] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 600.342527] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fe873a-79d2-4b29-b6c6-92c1fe5796ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.350087] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 600.350322] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b082609-336b-4aa3-b39f-8a44772d3e1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.352755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bec35eedf65412baff095d0e2ccb77e [ 600.360475] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 600.360475] env[62109]: value = "task-401454" [ 600.360475] env[62109]: _type = "Task" [ 600.360475] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.384618] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.431843] env[62109]: DEBUG nova.scheduler.client.report [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.433173] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 15f4342830dd46698d00d67dbffc26fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 600.445984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15f4342830dd46698d00d67dbffc26fc [ 600.846899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Releasing lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.846899] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.846899] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 600.846899] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d27dbdd-02ff-4c96-82ac-9ef0932a3722 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.854205] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35d9b81-2d92-4fbf-ab9a-2e90175ace5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.882321] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401454, 'name': PowerOffVM_Task, 'duration_secs': 0.137142} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.886635] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 600.886832] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 600.887352] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d91c5dae-4ece-4718-a16b-534729f7ba49 could not be found. [ 600.887576] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 600.887763] env[62109]: INFO nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Took 0.04 seconds to destroy the instance on the hypervisor. [ 600.888090] env[62109]: DEBUG oslo.service.loopingcall [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.900130] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-088e2d3f-086d-4892-82ce-2e5248f1ca11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.904544] env[62109]: DEBUG nova.compute.manager [-] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 600.904649] env[62109]: DEBUG nova.network.neutron [-] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 600.932151] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 600.932387] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 600.932561] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleting the datastore file [datastore2] 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.932816] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80eecd9e-9078-4156-a5fd-8709f5c44b76 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.935225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.440s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.935709] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.937409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg bcec8137a7554587a561837c6699dcde in queue reply_7522b64acfeb4981b1f36928b040d568 [ 600.938793] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.168s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.940757] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg ab60f70cc4884eb1ad1264ea76587055 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 600.947534] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for the task: (returnval){ [ 600.947534] env[62109]: value = "task-401456" [ 600.947534] env[62109]: _type = "Task" [ 600.947534] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.961206] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.981614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcec8137a7554587a561837c6699dcde [ 601.008125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab60f70cc4884eb1ad1264ea76587055 [ 601.138001] env[62109]: DEBUG nova.network.neutron [-] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.138529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6522e468483c40a680e8372f856eaaab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.148220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6522e468483c40a680e8372f856eaaab [ 601.410057] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquiring lock "11a6eaa1-0d35-49cf-9341-b74129cf087b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.410301] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "11a6eaa1-0d35-49cf-9341-b74129cf087b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.443464] env[62109]: DEBUG nova.compute.utils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.444140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 2cb89a4cccba466e9bbb0f3238a48f94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.445063] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.445237] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.463666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cb89a4cccba466e9bbb0f3238a48f94 [ 601.467173] env[62109]: DEBUG oslo_vmware.api [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Task: {'id': task-401456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105022} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.467173] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.467173] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 601.467173] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 601.467391] env[62109]: INFO nova.compute.manager [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Took 1.13 seconds to destroy the instance on the hypervisor. [ 601.467492] env[62109]: DEBUG oslo.service.loopingcall [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.467665] env[62109]: DEBUG nova.compute.manager [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.467753] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 601.491796] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 601.492341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fd8f7cc7ee7f4a989a18b48d098de94e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.536356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd8f7cc7ee7f4a989a18b48d098de94e [ 601.621207] env[62109]: DEBUG nova.policy [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77a7bbc94ac845e0b986f36581fffd10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39bd2e59c7e344c2922dbc693a6e0f7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 601.640373] env[62109]: DEBUG nova.network.neutron [-] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.640832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg edfd20c261d04b95b9e5099eaf0165e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.650365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edfd20c261d04b95b9e5099eaf0165e2 [ 601.779302] env[62109]: DEBUG nova.compute.manager [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Received event network-changed-f8b581d8-499a-4fb9-841d-9a1f886b5fda {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 601.779359] env[62109]: DEBUG nova.compute.manager [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Refreshing instance network info cache due to event network-changed-f8b581d8-499a-4fb9-841d-9a1f886b5fda. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 601.779566] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] Acquiring lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.779837] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] Acquired lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.779837] env[62109]: DEBUG nova.network.neutron [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Refreshing network info cache for port f8b581d8-499a-4fb9-841d-9a1f886b5fda {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 601.780848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] Expecting reply to msg 0530563568bf4a3e9a3955befc544a2b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.787945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0530563568bf4a3e9a3955befc544a2b [ 601.863517] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a9cdd0-f18e-421e-9501-ac3ad455e99c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.872214] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0deeca6-5dd5-4208-9b65-e5b042887d16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.906036] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d9385b-586c-409c-abbc-1d527cb8484f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.913953] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52ae6f3-9f82-4c7b-b7be-f86404879fa4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.929372] env[62109]: DEBUG nova.compute.provider_tree [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.930101] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 6bcbff59ef3242bd9aa6340fb330f49c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.936962] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bcbff59ef3242bd9aa6340fb330f49c [ 601.948507] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 
tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 601.950713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 1bd25e0dbf944eb1b921eb898339e611 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 601.994896] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.996119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 022e32adc85643bfaf3ce546e84bfbf0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.006932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 022e32adc85643bfaf3ce546e84bfbf0 [ 602.015732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bd25e0dbf944eb1b921eb898339e611 [ 602.144952] env[62109]: INFO nova.compute.manager [-] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Took 1.24 seconds to deallocate network for instance. [ 602.146271] env[62109]: DEBUG nova.compute.claims [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 602.146271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.360040] env[62109]: DEBUG nova.network.neutron [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.425729] env[62109]: DEBUG nova.network.neutron [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.426309] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] Expecting reply to msg 085fe8d0519a40bfbec36192dd6b70c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.432619] env[62109]: DEBUG nova.scheduler.client.report [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.435105] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 724cc8d14c324959be31d317313dcd43 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.437537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 085fe8d0519a40bfbec36192dd6b70c9 [ 602.456116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 3fb86c71f65e4b9c82acd95e3bd7a850 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.471195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 724cc8d14c324959be31d317313dcd43 [ 602.499721] env[62109]: INFO nova.compute.manager [-] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Took 1.03 seconds to deallocate network for instance. 
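The "Inventory has not changed" entries above come from the scheduler report client comparing the compute node's current inventory against what the local ProviderTree already caches for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. The following is a minimal illustrative sketch of that comparison using the exact figures from the log entry; it is not the nova.scheduler.client.report implementation, and the PUT call shown is only a hypothetical stand-in for the Placement update that would happen when the data differs.

# Illustrative sketch only: how an "Inventory has not changed" decision can be made.
local_inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def inventory_changed(cached, new):
    # Both sides use the same resource-class keys and field names,
    # so a plain dict comparison is sufficient for this sketch.
    return cached != new

cached_inventory = dict(local_inventory)  # what the provider tree already holds
if inventory_changed(cached_inventory, local_inventory):
    print('update inventory in Placement')  # hypothetical update step
else:
    print('Inventory has not changed in ProviderTree for provider: '
          '5d099501-5ecf-4ee9-ac08-22024ac3c80e')

When nothing differs, only the DEBUG line is emitted and no Placement call is made, which matches the repeated log entries for this provider.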
[ 602.503957] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg e649805f8b6b4188b2db2e38ac932ba8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.505258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fb86c71f65e4b9c82acd95e3bd7a850 [ 602.545010] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e649805f8b6b4188b2db2e38ac932ba8 [ 602.566015] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "3a4e1dcc-610f-4037-94e9-c9815c12ed1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.566280] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "3a4e1dcc-610f-4037-94e9-c9815c12ed1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.600095] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Successfully created port: 398e9cea-37a5-4265-b56d-a19344b26784 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.930131] env[62109]: DEBUG oslo_concurrency.lockutils [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] Releasing lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.930131] env[62109]: DEBUG nova.compute.manager [req-d7c8a153-fccb-41cd-9277-95467f936ca1 req-8d1f0886-9aaf-4a32-93f1-577beccbc8e2 service nova] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Received event network-vif-deleted-f8b581d8-499a-4fb9-841d-9a1f886b5fda {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 602.939382] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.943022] env[62109]: ERROR nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. 
[ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Traceback (most recent call last): [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.driver.spawn(context, instance, image_meta, [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] vm_ref = self.build_virtual_machine(instance, [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.943022] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] for vif in network_info: [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self._sync_wrapper(fn, *args, **kwargs) [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.wait() [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self[:] = self._gt.wait() [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self._exit_event.wait() [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] result = hub.switch() [ 602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
602.943470] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return self.greenlet.switch() [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] result = function(*args, **kwargs) [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] return func(*args, **kwargs) [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise e [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] nwinfo = self.network_api.allocate_for_instance( [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] created_port_ids = self._update_ports_for_instance( [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] with excutils.save_and_reraise_exception(): [ 602.943834] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] self.force_reraise() [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise self.value [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] updated_port = self._update_port( [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] _ensure_no_port_binding_failure(port) [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] raise exception.PortBindingFailed(port_id=port['id']) [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] nova.exception.PortBindingFailed: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. [ 602.944253] env[62109]: ERROR nova.compute.manager [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] [ 602.944638] env[62109]: DEBUG nova.compute.utils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 602.944638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.357s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.944638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 6f98d08aab984a31bb48ffd98bfe86c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.945147] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Build of instance 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd was re-scheduled: Binding failed for port 636f6a96-849f-411e-a602-2d048aa9867a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 602.946043] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 602.946225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquiring lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.946397] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Acquired lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.946539] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.947319] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 3907be635cde4d109423eae88322f342 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 602.959487] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.962148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3907be635cde4d109423eae88322f342 [ 602.992967] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.993337] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.993419] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.993540] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.993678] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.993819] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.994026] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.994305] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.994513] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.995050] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.995146] env[62109]: DEBUG nova.virt.hardware [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.996057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be7db7f-5422-463b-8bbe-779fad7a7016 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.005659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9e969c-1123-49f4-892b-1081c1bac7c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.010109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f98d08aab984a31bb48ffd98bfe86c6 [ 603.012237] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.591651] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.769685] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4f1efe-4422-40ef-8c81-aa7dd6d1b5a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.779683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ac6d26-0eb5-4d3f-b55b-b00dcc5534ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.818545] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.819206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 92e95e9d612240219999d5cfd9dc8677 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 603.836734] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76241a8-2554-4a22-a64a-e0dc85fe63f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.844746] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041a1751-2e64-437c-baf6-6963bd040b4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.849022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92e95e9d612240219999d5cfd9dc8677 [ 603.859254] env[62109]: DEBUG nova.compute.provider_tree [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.860082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg f4f6b9ca939845dcbfa18a07243e3669 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 603.868632] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4f6b9ca939845dcbfa18a07243e3669 [ 604.344193] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Releasing lock "refresh_cache-32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.344193] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 604.344193] env[62109]: DEBUG nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.344193] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.363261] env[62109]: DEBUG nova.scheduler.client.report [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.365819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg c648c2f02bae4ca9ae1c90edbc9d305c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 604.379156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c648c2f02bae4ca9ae1c90edbc9d305c [ 604.442520] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.443018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 4821a29ada6d40878cdb0983c209c307 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 604.451883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4821a29ada6d40878cdb0983c209c307 [ 604.868911] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.927s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.869623] env[62109]: ERROR nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Traceback (most recent call last): [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.driver.spawn(context, instance, image_meta, [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] vm_ref = self.build_virtual_machine(instance, [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.869623] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] for vif in network_info: [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self._sync_wrapper(fn, *args, **kwargs) [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: 
c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.wait() [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self[:] = self._gt.wait() [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self._exit_event.wait() [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] result = hub.switch() [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.870000] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return self.greenlet.switch() [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] result = function(*args, **kwargs) [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] return func(*args, **kwargs) [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise e [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] nwinfo = self.network_api.allocate_for_instance( [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] created_port_ids = self._update_ports_for_instance( [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] with excutils.save_and_reraise_exception(): [ 604.870361] env[62109]: ERROR nova.compute.manager [instance: 
c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] self.force_reraise() [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise self.value [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] updated_port = self._update_port( [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] _ensure_no_port_binding_failure(port) [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] raise exception.PortBindingFailed(port_id=port['id']) [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] nova.exception.PortBindingFailed: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. [ 604.870726] env[62109]: ERROR nova.compute.manager [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] [ 604.871034] env[62109]: DEBUG nova.compute.utils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 604.871662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.050s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.873473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 4029a1b9a73c423096acf2f290c675a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 604.874653] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Build of instance c742fcf9-ac27-4a04-81a2-d99741dba794 was re-scheduled: Binding failed for port 4bf704f8-170e-48fa-9d73-18fa03b4afe7, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 604.875089] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 604.875327] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.875472] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.875624] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.876036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 95dd239f89fb4fdea8cb744ddfcacd00 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 604.882230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95dd239f89fb4fdea8cb744ddfcacd00 [ 604.907883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4029a1b9a73c423096acf2f290c675a9 [ 604.945369] env[62109]: DEBUG nova.network.neutron [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 
tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.945956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 8f8b730cc8674037a7c44c3090929e18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 604.954659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f8b730cc8674037a7c44c3090929e18 [ 605.375396] env[62109]: ERROR nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. [ 605.375396] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.375396] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 605.375396] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 605.375396] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.375396] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.375396] env[62109]: ERROR nova.compute.manager raise self.value [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 605.375396] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 605.375396] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.375396] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 605.375923] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.375923] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 605.375923] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. 
[ 605.375923] env[62109]: ERROR nova.compute.manager [ 605.375923] env[62109]: Traceback (most recent call last): [ 605.375923] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 605.375923] env[62109]: listener.cb(fileno) [ 605.375923] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.375923] env[62109]: result = function(*args, **kwargs) [ 605.375923] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 605.375923] env[62109]: return func(*args, **kwargs) [ 605.375923] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.375923] env[62109]: raise e [ 605.375923] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.375923] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 605.375923] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 605.375923] env[62109]: created_port_ids = self._update_ports_for_instance( [ 605.375923] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 605.375923] env[62109]: with excutils.save_and_reraise_exception(): [ 605.375923] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.375923] env[62109]: self.force_reraise() [ 605.375923] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.375923] env[62109]: raise self.value [ 605.375923] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 605.375923] env[62109]: updated_port = self._update_port( [ 605.375923] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.375923] env[62109]: _ensure_no_port_binding_failure(port) [ 605.375923] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.375923] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 605.376767] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. [ 605.376767] env[62109]: Removing descriptor: 16 [ 605.376767] env[62109]: ERROR nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. 
[ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Traceback (most recent call last): [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] yield resources [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.driver.spawn(context, instance, image_meta, [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.376767] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] vm_ref = self.build_virtual_machine(instance, [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] for vif in network_info: [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self._sync_wrapper(fn, *args, **kwargs) [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.wait() [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self[:] = self._gt.wait() [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self._exit_event.wait() [ 605.377160] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.377531] env[62109]: ERROR 
nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] result = hub.switch() [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self.greenlet.switch() [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] result = function(*args, **kwargs) [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return func(*args, **kwargs) [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise e [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] nwinfo = self.network_api.allocate_for_instance( [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 605.377531] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] created_port_ids = self._update_ports_for_instance( [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] with excutils.save_and_reraise_exception(): [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.force_reraise() [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise self.value [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] updated_port = self._update_port( [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.378010] 
env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] _ensure_no_port_binding_failure(port) [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.378010] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise exception.PortBindingFailed(port_id=port['id']) [ 605.378362] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. [ 605.378362] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] [ 605.378362] env[62109]: INFO nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Terminating instance [ 605.392581] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquiring lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.392812] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquired lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.392999] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.393454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 0d8e0467ed684efaa64c808a7ba53848 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 605.402702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d8e0467ed684efaa64c808a7ba53848 [ 605.455771] env[62109]: INFO nova.compute.manager [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] [instance: 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd] Took 1.11 seconds to deallocate network for instance. 
[ 605.457936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg 353fc15a03ce49adb4b42a4139010586 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 605.460565] env[62109]: DEBUG nova.compute.manager [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Received event network-changed-398e9cea-37a5-4265-b56d-a19344b26784 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 605.461065] env[62109]: DEBUG nova.compute.manager [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Refreshing instance network info cache due to event network-changed-398e9cea-37a5-4265-b56d-a19344b26784. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 605.461065] env[62109]: DEBUG oslo_concurrency.lockutils [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] Acquiring lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.462522] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.505817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 353fc15a03ce49adb4b42a4139010586 [ 605.790073] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f735797-89bf-4a67-b45c-fc1a99051699 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.800707] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b482acc7-6eaf-4b77-80c8-09b25465aba6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.832075] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee467cd-922a-46eb-bcc4-d50ba4706e04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.839458] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5e239c-64a3-43ef-aa1f-8b3749089f14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.852681] env[62109]: DEBUG nova.compute.provider_tree [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.853219] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 
tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg bd729d6dc6da4d618927af6c497fe7d5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 605.857528] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.858126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 21180adcb7384a79aea066a587eea43f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 605.862549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd729d6dc6da4d618927af6c497fe7d5 [ 605.870157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21180adcb7384a79aea066a587eea43f [ 605.967205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg a93fd16e10194beaaf21fe4bab4436df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 605.977981] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.014893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a93fd16e10194beaaf21fe4bab4436df [ 606.362188] env[62109]: DEBUG nova.scheduler.client.report [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 606.362733] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 12247f3f882c48eabd9c9974263d5b04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.364527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-c742fcf9-ac27-4a04-81a2-d99741dba794" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.365171] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 
tempest-ImagesTestJSON-1803525400-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 606.365171] env[62109]: DEBUG nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.365171] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.378395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12247f3f882c48eabd9c9974263d5b04 [ 606.379556] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.380052] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg f519c212d8054dde8256b5381bfa43ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.393611] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f519c212d8054dde8256b5381bfa43ce [ 606.450016] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.450649] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg af25561075374b638635a2c89c18a49f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.459288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af25561075374b638635a2c89c18a49f [ 606.505745] env[62109]: INFO nova.scheduler.client.report [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Deleted allocations for instance 32a8ec11-f3ca-4df2-8231-3ce68c06bbcd [ 606.512022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Expecting reply to msg c0f6194f682545a98ac7a9af36f6f328 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.529229] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0f6194f682545a98ac7a9af36f6f328 [ 606.865663] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.994s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.866332] env[62109]: ERROR nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. 
[ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Traceback (most recent call last): [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.driver.spawn(context, instance, image_meta, [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] vm_ref = self.build_virtual_machine(instance, [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.866332] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] for vif in network_info: [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self._sync_wrapper(fn, *args, **kwargs) [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.wait() [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self[:] = self._gt.wait() [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self._exit_event.wait() [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] result = hub.switch() [ 606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
606.866749] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return self.greenlet.switch() [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] result = function(*args, **kwargs) [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] return func(*args, **kwargs) [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise e [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] nwinfo = self.network_api.allocate_for_instance( [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] created_port_ids = self._update_ports_for_instance( [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] with excutils.save_and_reraise_exception(): [ 606.867158] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] self.force_reraise() [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise self.value [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] updated_port = self._update_port( [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] _ensure_no_port_binding_failure(port) [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] raise exception.PortBindingFailed(port_id=port['id']) [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] nova.exception.PortBindingFailed: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. [ 606.867563] env[62109]: ERROR nova.compute.manager [instance: d24eec8f-565a-4a02-834c-267e633ebb12] [ 606.867946] env[62109]: DEBUG nova.compute.utils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 606.868265] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.876s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.869671] env[62109]: INFO nova.compute.claims [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.871191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 19b86f17e543469697aa18521023784a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.872928] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Build of instance d24eec8f-565a-4a02-834c-267e633ebb12 was re-scheduled: Binding failed for port 5cc66433-8c1c-4902-a2f5-0d2c2f4a5ec1, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 606.873485] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 606.873722] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquiring lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.873867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Acquired lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.874025] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 606.874384] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 748ad88d5601455eb5740af8d5f98aa8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.880486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 748ad88d5601455eb5740af8d5f98aa8 [ 606.882162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Releasing lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.882531] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.882714] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 606.883248] env[62109]: DEBUG oslo_concurrency.lockutils [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] Acquired lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.883422] env[62109]: DEBUG nova.network.neutron [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Refreshing network info cache for port 398e9cea-37a5-4265-b56d-a19344b26784 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.883782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] Expecting reply to msg 7a818b2b684a4b2ba88976b087077599 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.884734] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab26a4ba-2835-4327-a094-e8142a7a27e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.892356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a818b2b684a4b2ba88976b087077599 [ 606.895553] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f617adb-8f94-4188-8fd9-c78d3c2bf70e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.922491] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2fb7c1e4-d756-4528-914e-b924c5a3be38 could not be found. [ 606.922726] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 606.922902] env[62109]: INFO nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Took 0.04 seconds to destroy the instance on the hypervisor. [ 606.923136] env[62109]: DEBUG oslo.service.loopingcall [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.923338] env[62109]: DEBUG nova.compute.manager [-] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.923428] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.927400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19b86f17e543469697aa18521023784a [ 606.953931] env[62109]: DEBUG nova.network.neutron [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.954172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 67f43b161cec4579994fd800d829814a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.956195] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.956706] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3d6143906a4141f881ed3f333ae63178 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 606.964338] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d6143906a4141f881ed3f333ae63178 [ 606.966951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67f43b161cec4579994fd800d829814a [ 607.014254] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53e75428-6f73-420a-ac14-a07867f8a23c tempest-ServersAdminNegativeTestJSON-53709098 tempest-ServersAdminNegativeTestJSON-53709098-project-member] Lock "32a8ec11-f3ca-4df2-8231-3ce68c06bbcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.213s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.014873] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg eafce5209b414e0c90172dd9aac86193 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.028803] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eafce5209b414e0c90172dd9aac86193 [ 607.382607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 8208ece53fe04f268c6955f621f63035 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.387095] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8208ece53fe04f268c6955f621f63035 [ 607.412961] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 
tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.416658] env[62109]: DEBUG nova.network.neutron [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.456927] env[62109]: INFO nova.compute.manager [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: c742fcf9-ac27-4a04-81a2-d99741dba794] Took 1.09 seconds to deallocate network for instance. [ 607.458930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg bce62ff87e28400d91138c70f15925ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.467080] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.467080] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 03bddf3e74f142db80fdeb759fd6239b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.478522] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03bddf3e74f142db80fdeb759fd6239b [ 607.515272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bce62ff87e28400d91138c70f15925ad [ 607.516902] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 607.518861] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 9c33ee33df154f8cbfa58c0e6283fe9b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.559684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c33ee33df154f8cbfa58c0e6283fe9b [ 607.582450] env[62109]: DEBUG nova.network.neutron [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.583001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] Expecting reply to msg c6062e067b8a4b518010425511155f6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.591622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6062e067b8a4b518010425511155f6a [ 607.729822] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.730360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 61f18afe17c04358bee23a0ece98f5b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.738516] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61f18afe17c04358bee23a0ece98f5b5 [ 607.964195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 31beb42587964488b986059c3af2d2f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 607.978661] env[62109]: INFO nova.compute.manager [-] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Took 1.05 seconds to deallocate network for instance. 
[ 607.979851] env[62109]: DEBUG nova.compute.claims [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 607.979851] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.997593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31beb42587964488b986059c3af2d2f2 [ 608.041352] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.087484] env[62109]: DEBUG oslo_concurrency.lockutils [req-5e31824d-f66b-4c0a-b710-a7cdf3ab80fe req-b78cc69c-6344-485e-b037-789d2101e0bb service nova] Releasing lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.214789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "0e018d70-d6dd-4f79-bb03-14b815645562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.215090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "0e018d70-d6dd-4f79-bb03-14b815645562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.233775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Releasing lock "refresh_cache-d24eec8f-565a-4a02-834c-267e633ebb12" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.234050] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 608.234281] env[62109]: DEBUG nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.234456] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 608.279119] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.286336] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 6cce1f6f16434ec598bbdf6ac5a27bf1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 608.297116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cce1f6f16434ec598bbdf6ac5a27bf1 [ 608.321950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65344105-8157-4125-bbba-4cf14b0dc9a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.330341] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f162083-2a25-45c4-8f1d-43538a593ae6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.363091] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136a9cb4-1786-4e80-a537-85337f7b0641 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.372203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d55b78f-dcbf-463c-8f55-ef0a16cf3bc7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.384261] env[62109]: DEBUG nova.compute.provider_tree [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.385001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 12395dc52b184871b7b8b5bbd3d11789 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 608.416619] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 12395dc52b184871b7b8b5bbd3d11789 [ 608.481003] env[62109]: DEBUG nova.compute.manager [req-7874b36b-a7ca-4284-8faf-e2d7a958d368 req-de8040ba-f093-4db8-b158-192fbdf02d0d service nova] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Received event network-vif-deleted-398e9cea-37a5-4265-b56d-a19344b26784 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 608.488983] env[62109]: INFO nova.scheduler.client.report [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Deleted allocations for instance c742fcf9-ac27-4a04-81a2-d99741dba794 [ 608.495140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 28ab7209ead74dcbad87777406f9cac7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 608.519168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28ab7209ead74dcbad87777406f9cac7 [ 608.791388] env[62109]: DEBUG nova.network.neutron [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.791932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg bfe638f7ec81459e80b9b447a9c0e51d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 608.803017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe638f7ec81459e80b9b447a9c0e51d [ 608.892454] env[62109]: DEBUG nova.scheduler.client.report [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.894982] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg e17d00ca4cba4c7e800701ad86691545 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 608.906893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e17d00ca4cba4c7e800701ad86691545 [ 608.998894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6dd56597-da9d-4a0f-a10b-4aa717800955 tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "c742fcf9-ac27-4a04-81a2-d99741dba794" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.044s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.999528] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg bbee3680345e434cb835eb98a5bf4cb9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.012366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbee3680345e434cb835eb98a5bf4cb9 [ 609.294726] env[62109]: INFO nova.compute.manager [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] [instance: d24eec8f-565a-4a02-834c-267e633ebb12] Took 1.06 seconds to deallocate network for instance. [ 609.296545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 37875b9b205b4ab3b641eeb69b06500b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.356513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37875b9b205b4ab3b641eeb69b06500b [ 609.398043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.398568] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 609.400278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 0dd8add2ee5641158c1612952f22308d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.401288] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.891s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.402686] env[62109]: INFO nova.compute.claims [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 609.407719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 73a4e0e022e14d448f70659d028ed88b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.441930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dd8add2ee5641158c1612952f22308d [ 609.453734] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73a4e0e022e14d448f70659d028ed88b [ 609.502497] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 609.504312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 5aabb996fa874f979be423a02c071f2c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.539481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aabb996fa874f979be423a02c071f2c [ 609.804154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg d96d750d81a94a7eb6fddbeeae986328 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.859300] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d96d750d81a94a7eb6fddbeeae986328 [ 609.910871] env[62109]: DEBUG nova.compute.utils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.911953] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 720be845b8054fb88c3313966232c63f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.913582] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 3e59a78b1c9e4eb394b0ad4a69b9756f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 609.914545] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 609.914711] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 609.921348] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e59a78b1c9e4eb394b0ad4a69b9756f [ 609.926979] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 720be845b8054fb88c3313966232c63f [ 609.975095] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "c9b2ced5-a77c-4bff-b115-ce5c523be630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.975313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "c9b2ced5-a77c-4bff-b115-ce5c523be630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.025492] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.099512] env[62109]: DEBUG nova.policy [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '332259bb7d2841e385dc46a558da2fd9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5eead97e11a4688b0058d50d5bef6ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 610.339544] env[62109]: INFO nova.scheduler.client.report [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Deleted allocations for instance d24eec8f-565a-4a02-834c-267e633ebb12 [ 610.345574] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Expecting reply to msg 036117b74f3f40ffbddd2469e2ee1d3c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 610.363724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
036117b74f3f40ffbddd2469e2ee1d3c [ 610.415863] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 610.417593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 79a67aa330e1428085faa5c92df9123f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 610.464981] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79a67aa330e1428085faa5c92df9123f [ 610.729930] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464019ca-8df1-4da7-9fe7-70083919dae0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.738026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e369223-e6a3-412f-b7de-5fcd9df81ed6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.769637] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac27196-d6e3-42f9-a658-547cc4265682 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.776956] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179b3e9b-cc66-4dc3-b29c-e32a4b1993f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.791303] env[62109]: DEBUG nova.compute.provider_tree [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.791841] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg e99013609951426789cec89f72af9fcc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 610.801449] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Successfully created port: 4f949fd6-9396-4f81-896d-140fe0cabbdf {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 610.810340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e99013609951426789cec89f72af9fcc [ 610.846927] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fd717bfe-f2b1-46c2-b84c-d116f213f015 tempest-FloatingIPsAssociationTestJSON-143793456 tempest-FloatingIPsAssociationTestJSON-143793456-project-member] Lock "d24eec8f-565a-4a02-834c-267e633ebb12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.425s {{(pid=62109) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.847702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 69691f7258ef4e40ab4bb9e7b101b0d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 610.859339] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69691f7258ef4e40ab4bb9e7b101b0d8 [ 610.924409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 1ca7a4fda1364672956de3fe0cc5669b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 610.966784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ca7a4fda1364672956de3fe0cc5669b [ 611.302288] env[62109]: DEBUG nova.scheduler.client.report [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 611.302506] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg c921928236d245c1b1caab0f51a37050 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 611.337075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c921928236d245c1b1caab0f51a37050 [ 611.357473] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 611.359699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 087f6485504c46d4952dca15c1e6f800 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 611.431421] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 611.452512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 087f6485504c46d4952dca15c1e6f800 [ 611.484380] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 611.484637] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 611.484786] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.484966] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 611.485280] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.485280] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 611.485502] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 611.485634] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 611.485740] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 611.485961] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 611.486097] env[62109]: DEBUG nova.virt.hardware [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.487030] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da967b5a-f56a-4b73-ae81-763ccdcde2ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.499762] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123d8c3c-6250-417b-a9ce-d8a36550dcc9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.807646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.807646] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 611.807646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 26f73ff55d6943eab4e5797ac88b4223 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 611.808484] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.851s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.809967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg a1de328b91d44df2bda335569bd0a296 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 611.854803] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26f73ff55d6943eab4e5797ac88b4223 [ 611.883535] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.891965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1de328b91d44df2bda335569bd0a296 [ 611.996651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "26a287d7-4602-4d83-8828-41870a49c343" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.996964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "26a287d7-4602-4d83-8828-41870a49c343" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.317627] env[62109]: DEBUG nova.compute.utils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 612.317627] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 1b1e543344f84d6bb3037fd71bf13d6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 612.317627] env[62109]: DEBUG nova.compute.manager [None 
req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 612.317627] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 612.326553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b1e543344f84d6bb3037fd71bf13d6a [ 612.362679] env[62109]: DEBUG nova.policy [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73751885e6db46f99b73d3ca8786f135', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a65a1ee33b854b9c94073bc5e1316ebe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 612.717698] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3637a4-a107-4d2c-b786-92589e09a3c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.725265] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791ce64a-0670-46bf-a51b-22e9e84d84ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.772037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa9ff9d-4c3d-4e48-8dbd-71f26051a037 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.780193] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a981386f-6fd5-4799-ad81-93a71048b41c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.794607] env[62109]: DEBUG nova.compute.provider_tree [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.795696] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 6f367ef6e6e140c8947e1828e56c3e5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 612.802897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f367ef6e6e140c8947e1828e56c3e5c [ 612.822160] 
env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 612.824424] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg b40687a160d24537840ba1155d8c26f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 612.867991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b40687a160d24537840ba1155d8c26f5 [ 613.134955] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Successfully created port: a786577c-7bcb-4451-83d7-073ce6e077ed {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 613.262548] env[62109]: ERROR nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. [ 613.262548] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.262548] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.262548] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.262548] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.262548] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.262548] env[62109]: ERROR nova.compute.manager raise self.value [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.262548] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.262548] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.262548] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.263107] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.263107] env[62109]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 613.263107] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. [ 613.263107] env[62109]: ERROR nova.compute.manager [ 613.263107] env[62109]: Traceback (most recent call last): [ 613.263107] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.263107] env[62109]: listener.cb(fileno) [ 613.263107] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.263107] env[62109]: result = function(*args, **kwargs) [ 613.263107] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.263107] env[62109]: return func(*args, **kwargs) [ 613.263107] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.263107] env[62109]: raise e [ 613.263107] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.263107] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 613.263107] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.263107] env[62109]: created_port_ids = self._update_ports_for_instance( [ 613.263107] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.263107] env[62109]: with excutils.save_and_reraise_exception(): [ 613.263107] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.263107] env[62109]: self.force_reraise() [ 613.263107] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.263107] env[62109]: raise self.value [ 613.263107] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.263107] env[62109]: updated_port = self._update_port( [ 613.263107] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.263107] env[62109]: _ensure_no_port_binding_failure(port) [ 613.263107] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.263107] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.263973] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. [ 613.263973] env[62109]: Removing descriptor: 19 [ 613.263973] env[62109]: ERROR nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. 
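For readers tracing the stack above: the failure originates with Neutron reporting the port's binding as failed, which Nova's network layer turns into PortBindingFailed. A minimal, self-contained sketch of that check, with simplified names and a local exception class standing in for nova.exception (the real helper is the `_ensure_no_port_binding_failure` frame shown in the traceback, in nova/network/neutron.py):

```python
# Sketch of the check that produces the PortBindingFailed seen above.
# Simplified: a local exception class stands in for nova.exception, and the
# port is a plain dict as returned by the Neutron API.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type Neutron reports on a failed binding


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    bad_port = {'id': '4f949fd6-9396-4f81-896d-140fe0cabbdf',
                'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)  # mirrors the message logged by nova.compute.manager above
```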
[ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Traceback (most recent call last): [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] yield resources [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.driver.spawn(context, instance, image_meta, [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.263973] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] vm_ref = self.build_virtual_machine(instance, [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] for vif in network_info: [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self._sync_wrapper(fn, *args, **kwargs) [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.wait() [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self[:] = self._gt.wait() [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self._exit_event.wait() [ 613.264359] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.264750] env[62109]: ERROR 
nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] result = hub.switch() [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self.greenlet.switch() [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] result = function(*args, **kwargs) [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return func(*args, **kwargs) [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise e [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] nwinfo = self.network_api.allocate_for_instance( [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.264750] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] created_port_ids = self._update_ports_for_instance( [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] with excutils.save_and_reraise_exception(): [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.force_reraise() [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise self.value [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] updated_port = self._update_port( [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.265174] 
env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] _ensure_no_port_binding_failure(port) [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.265174] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise exception.PortBindingFailed(port_id=port['id']) [ 613.265560] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. [ 613.265560] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] [ 613.265560] env[62109]: INFO nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Terminating instance [ 613.266586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.266746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquired lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.266908] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.267354] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 1aebb04a783540019a6b33d24c3a37fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.279373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aebb04a783540019a6b33d24c3a37fb [ 613.299230] env[62109]: DEBUG nova.scheduler.client.report [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 613.300609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 
tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 01048c2fadc04aa78fc4b79354a6248f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.312063] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01048c2fadc04aa78fc4b79354a6248f [ 613.331001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 5c2736401388400988356deec00b4321 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.389965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2736401388400988356deec00b4321 [ 613.615900] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 613.615900] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 613.615900] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f11a99b8ecec49d997bba53cd006c642 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.635694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f11a99b8ecec49d997bba53cd006c642 [ 613.789128] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.812363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.812363] env[62109]: ERROR nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. 
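The "Inventory has not changed for provider ... based on inventory data" records above carry the full inventory the resource tracker reports to Placement. As a worked example, assuming the usual Placement interpretation that usable capacity is (total - reserved) * allocation_ratio, the logged numbers translate as follows:

```python
# Worked example: usable capacity implied by the inventory dict logged above.
# Assumes the standard Placement convention:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 124,   'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g} (max per allocation: {inv['max_unit']})")

# VCPU: capacity 192 (max per allocation: 16)
# MEMORY_MB: capacity 196078 (max per allocation: 65530)
# DISK_GB: capacity 400 (max per allocation: 124)
```

This is why a single m1.nano claim (1 vCPU, 192 MB, 1 GB) succeeds easily in the "Claim successful" records above even while several builds run concurrently.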
[ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Traceback (most recent call last): [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.driver.spawn(context, instance, image_meta, [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.812363] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] vm_ref = self.build_virtual_machine(instance, [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] for vif in network_info: [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self._sync_wrapper(fn, *args, **kwargs) [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.wait() [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self[:] = self._gt.wait() [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self._exit_event.wait() [ 613.812805] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] result = hub.switch() [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return self.greenlet.switch() [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] result = function(*args, **kwargs) [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] return func(*args, **kwargs) [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise e [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] nwinfo = self.network_api.allocate_for_instance( [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.813178] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] created_port_ids = self._update_ports_for_instance( [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] with excutils.save_and_reraise_exception(): [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] self.force_reraise() [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise self.value [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] updated_port = self._update_port( [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] _ensure_no_port_binding_failure(port) [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 613.813592] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] raise exception.PortBindingFailed(port_id=port['id']) [ 613.813927] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] nova.exception.PortBindingFailed: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. [ 613.813927] env[62109]: ERROR nova.compute.manager [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] [ 613.813927] env[62109]: DEBUG nova.compute.utils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 613.813927] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.343s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.813927] env[62109]: INFO nova.compute.claims [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.815515] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 743fff6a58954aebbafca1c194ff557f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.817565] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Build of instance 39901fc8-8fc8-4812-936e-0ded3811d61c was re-scheduled: Binding failed for port c9378bce-31f2-4b1a-a99e-35f01300a701, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 613.818169] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 613.818502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquiring lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.818759] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Acquired lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.819032] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.819520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 1265ebdd7c1749e99c0202f189179694 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.828393] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1265ebdd7c1749e99c0202f189179694 [ 613.832521] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 613.860638] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.860887] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.861041] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.861439] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.861439] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.861559] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.861697] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.861829] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.861991] env[62109]: DEBUG nova.virt.hardware [None 
req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.862147] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.862353] env[62109]: DEBUG nova.virt.hardware [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.863211] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bcb06f-7c33-4594-8b71-53990cdae859 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.866374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 743fff6a58954aebbafca1c194ff557f [ 613.873782] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c99508-7da2-415f-a5d2-2c25da34a2e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.924533] env[62109]: DEBUG nova.compute.manager [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Received event network-changed-4f949fd6-9396-4f81-896d-140fe0cabbdf {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 613.924533] env[62109]: DEBUG nova.compute.manager [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Refreshing instance network info cache due to event network-changed-4f949fd6-9396-4f81-896d-140fe0cabbdf. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 613.924533] env[62109]: DEBUG oslo_concurrency.lockutils [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] Acquiring lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.936959] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.936959] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 89f653052d7d4b60a7af06ba7e55e140 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 613.946489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89f653052d7d4b60a7af06ba7e55e140 [ 613.994343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquiring lock "dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.994343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.120611] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.120818] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 614.120909] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 614.121527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ac5d81a8d4bb45b89eb41652c72853fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.138040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac5d81a8d4bb45b89eb41652c72853fa [ 614.322624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to 
msg 25a60ce0c9834964b80bf4ba7db4c84f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.331444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25a60ce0c9834964b80bf4ba7db4c84f [ 614.342290] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.438573] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Releasing lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.439076] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.439278] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 614.439582] env[62109]: DEBUG oslo_concurrency.lockutils [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] Acquired lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.439759] env[62109]: DEBUG nova.network.neutron [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Refreshing network info cache for port 4f949fd6-9396-4f81-896d-140fe0cabbdf {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 614.440247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] Expecting reply to msg 5b9e7ba7ee074a6eaceb101f1000883e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.441033] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b296ac2f-08cf-4ec5-83b3-fd73dcd81a55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.450745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b9e7ba7ee074a6eaceb101f1000883e [ 614.453742] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9536ec2-9536-4d78-b6da-b95f2d4f68ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.475572] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 
f1239cdd-d1b3-4494-8204-0fe150737579] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1239cdd-d1b3-4494-8204-0fe150737579 could not be found. [ 614.475828] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 614.476238] env[62109]: INFO nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Took 0.04 seconds to destroy the instance on the hypervisor. [ 614.476327] env[62109]: DEBUG oslo.service.loopingcall [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.476517] env[62109]: DEBUG nova.compute.manager [-] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.476605] env[62109]: DEBUG nova.network.neutron [-] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 614.532829] env[62109]: DEBUG nova.network.neutron [-] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.533648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fe44580c07eb4a6294856aa3a21c7a1c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.544432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe44580c07eb4a6294856aa3a21c7a1c [ 614.621025] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.621618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 4e189c66beec41838b135a2ce2147b6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.625863] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 614.626135] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 614.626491] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 614.626680] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 614.626832] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 614.633541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e189c66beec41838b135a2ce2147b6e [ 614.665045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquiring lock "29715a53-7a71-4708-b522-e678fe5bd6a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.665045] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "29715a53-7a71-4708-b522-e678fe5bd6a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.678041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.678194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquired lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.678346] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 614.678498] env[62109]: DEBUG nova.objects.instance [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lazy-loading 'info_cache' on Instance uuid 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 614.679277] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg cce403f98b2c4ee8bca274f11f6fd850 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 614.702937] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg cce403f98b2c4ee8bca274f11f6fd850 [ 614.974813] env[62109]: DEBUG nova.network.neutron [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.035851] env[62109]: DEBUG nova.network.neutron [-] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.036415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 87e00543f66d4f0d89ee0db7b44a86f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.049551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87e00543f66d4f0d89ee0db7b44a86f7 [ 615.126450] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Releasing lock "refresh_cache-39901fc8-8fc8-4812-936e-0ded3811d61c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.126450] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 615.126450] env[62109]: DEBUG nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 615.126709] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 615.183456] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 38cfac70afd24cc2b036b996914cfd98 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.191657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38cfac70afd24cc2b036b996914cfd98 [ 615.247516] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.248172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 843739d817ee443eae5adad3f352de26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.255120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 843739d817ee443eae5adad3f352de26 [ 615.289207] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0cfe5b-c7cd-4756-bbac-7b6f5a80974c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.295604] env[62109]: DEBUG nova.network.neutron [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.296140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] Expecting reply to msg 199be554b4cb496abfcdeca570e8b361 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.297580] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7582767a-2e35-4e9b-8e53-e7835abce97d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.328920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 199be554b4cb496abfcdeca570e8b361 [ 615.330169] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0f18fe-0ae5-4a8f-b9b5-8e76f1075681 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.338173] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2adc6ff-4a70-4079-8abc-e41c7b0bb8ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.353477] env[62109]: DEBUG nova.compute.provider_tree [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.354046] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg bdb839cb390f4f1ca5dfc1eafb483068 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.362537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdb839cb390f4f1ca5dfc1eafb483068 [ 615.540735] env[62109]: INFO nova.compute.manager [-] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Took 1.06 seconds to deallocate network for instance. 
[ 615.544381] env[62109]: DEBUG nova.compute.claims [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 615.544580] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.617692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "446bd52c-4ffb-4e77-89fb-3e8535ceb4af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.617954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "446bd52c-4ffb-4e77-89fb-3e8535ceb4af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.750387] env[62109]: DEBUG nova.network.neutron [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.750965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 9bf9994b9bbd4cacb54338d0e273785c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.758994] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bf9994b9bbd4cacb54338d0e273785c [ 615.784706] env[62109]: ERROR nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. 
[ 615.784706] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.784706] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.784706] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.784706] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.784706] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.784706] env[62109]: ERROR nova.compute.manager raise self.value [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.784706] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 615.784706] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.784706] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 615.785231] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.785231] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 615.785231] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. 
[ 615.785231] env[62109]: ERROR nova.compute.manager [ 615.785231] env[62109]: Traceback (most recent call last): [ 615.785231] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 615.785231] env[62109]: listener.cb(fileno) [ 615.785231] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.785231] env[62109]: result = function(*args, **kwargs) [ 615.785231] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 615.785231] env[62109]: return func(*args, **kwargs) [ 615.785231] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.785231] env[62109]: raise e [ 615.785231] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.785231] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 615.785231] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.785231] env[62109]: created_port_ids = self._update_ports_for_instance( [ 615.785231] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.785231] env[62109]: with excutils.save_and_reraise_exception(): [ 615.785231] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.785231] env[62109]: self.force_reraise() [ 615.785231] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.785231] env[62109]: raise self.value [ 615.785231] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.785231] env[62109]: updated_port = self._update_port( [ 615.785231] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.785231] env[62109]: _ensure_no_port_binding_failure(port) [ 615.785231] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.785231] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 615.786042] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. [ 615.786042] env[62109]: Removing descriptor: 16 [ 615.786042] env[62109]: ERROR nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. 
[ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Traceback (most recent call last): [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] yield resources [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.driver.spawn(context, instance, image_meta, [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 615.786042] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] vm_ref = self.build_virtual_machine(instance, [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] vif_infos = vmwarevif.get_vif_info(self._session, [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] for vif in network_info: [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self._sync_wrapper(fn, *args, **kwargs) [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.wait() [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self[:] = self._gt.wait() [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self._exit_event.wait() [ 615.786374] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 615.786772] env[62109]: ERROR 
nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] result = hub.switch() [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self.greenlet.switch() [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] result = function(*args, **kwargs) [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return func(*args, **kwargs) [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise e [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] nwinfo = self.network_api.allocate_for_instance( [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.786772] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] created_port_ids = self._update_ports_for_instance( [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] with excutils.save_and_reraise_exception(): [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.force_reraise() [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise self.value [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] updated_port = self._update_port( [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.787228] 
env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] _ensure_no_port_binding_failure(port) [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.787228] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise exception.PortBindingFailed(port_id=port['id']) [ 615.787613] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. [ 615.787613] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] [ 615.787613] env[62109]: INFO nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Terminating instance [ 615.788559] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.788661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 13368fabc1044a9786edb1cbc99818b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.794386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquiring lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.794386] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquired lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.794386] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 615.794386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg e776aa63a9de446f82a2a56cf2153302 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.799854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13368fabc1044a9786edb1cbc99818b0 [ 615.800572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e776aa63a9de446f82a2a56cf2153302 [ 615.802332] env[62109]: DEBUG oslo_concurrency.lockutils [req-6eaa9f43-6a8b-4478-9260-dc90ec02a60b req-f1010970-ec8c-4509-9a4e-86d93d6b4620 service nova] Releasing lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" 
{{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.842766] env[62109]: DEBUG nova.compute.manager [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Received event network-changed-a786577c-7bcb-4451-83d7-073ce6e077ed {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 615.842766] env[62109]: DEBUG nova.compute.manager [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Refreshing instance network info cache due to event network-changed-a786577c-7bcb-4451-83d7-073ce6e077ed. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 615.842766] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] Acquiring lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.856284] env[62109]: DEBUG nova.scheduler.client.report [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.858772] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 4c44cf42e10c42ac8fdaf8f8fbe6ce62 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 615.887809] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c44cf42e10c42ac8fdaf8f8fbe6ce62 [ 616.253334] env[62109]: INFO nova.compute.manager [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] [instance: 39901fc8-8fc8-4812-936e-0ded3811d61c] Took 1.13 seconds to deallocate network for instance. [ 616.255156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 8ec90e72ea9c42f2acf99649d6f501b4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.303794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ec90e72ea9c42f2acf99649d6f501b4 [ 616.332039] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.361731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.362265] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 616.363902] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg ca6bf25e58ba4c258c36e4f0c148303e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.365238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.332s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.372402] env[62109]: INFO nova.compute.claims [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.372402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg d5e21e34b2664e62b2dec0ac2d615211 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.379167] env[62109]: DEBUG nova.compute.manager [req-07229a56-bbb9-4246-8762-ff336e23556d req-79055a73-f6be-4f41-abb0-9cb4c6dec41f service nova] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Received event network-vif-deleted-4f949fd6-9396-4f81-896d-140fe0cabbdf {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 616.400920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca6bf25e58ba4c258c36e4f0c148303e [ 616.413193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5e21e34b2664e62b2dec0ac2d615211 [ 616.435373] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.435890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ac772ac1141a46f99b9f384589c34431 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.453288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
ac772ac1141a46f99b9f384589c34431 [ 616.605258] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.605894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 8fc6f25e7de24281a7cd0c21299fb39c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.615395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fc6f25e7de24281a7cd0c21299fb39c [ 616.760589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg c8d34fe98eb0432ea826791d60aee37b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.819990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8d34fe98eb0432ea826791d60aee37b [ 616.874489] env[62109]: DEBUG nova.compute.utils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.875131] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg f77494fa94c9425ea1de09d50425f603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.877193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 8dca8f80fd414832ac0debdefcbbdbc9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.878021] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 616.879891] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 616.898872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dca8f80fd414832ac0debdefcbbdbc9 [ 616.908703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f77494fa94c9425ea1de09d50425f603 [ 616.940065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Releasing lock "refresh_cache-9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.943507] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 616.943507] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.943507] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.943507] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.943507] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.943507] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.944061] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.944061] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 616.944061] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.944061] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg a9f2a9d309e4472e84f5f79a96580ad7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 616.965174] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9f2a9d309e4472e84f5f79a96580ad7 [ 616.990958] env[62109]: DEBUG nova.policy [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '977af06e03fb4180b6c5cfd5ae8aa0cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '003b76634a314c8ba271d8ff8a84f4cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 617.108352] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Releasing lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.108949] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 617.109201] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 617.109605] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] Acquired lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.109827] env[62109]: DEBUG nova.network.neutron [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Refreshing network info cache for port a786577c-7bcb-4451-83d7-073ce6e077ed {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 617.110491] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] Expecting reply to msg 4cd114363d464e818c7b73ceb31eefe0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.111559] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4fca727-b789-420d-b1aa-45d25cfcc4b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.118854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cd114363d464e818c7b73ceb31eefe0 [ 617.122590] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54eceaf9-9057-45b0-afa3-9e16bda3973c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.150453] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce54ba3d-2cd5-4400-b334-8443ef73bbff could not be found. [ 617.150749] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 617.150934] env[62109]: INFO nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 617.151104] env[62109]: DEBUG oslo.service.loopingcall [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.151598] env[62109]: DEBUG nova.compute.manager [-] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 617.151689] env[62109]: DEBUG nova.network.neutron [-] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 617.171029] env[62109]: DEBUG nova.network.neutron [-] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.171607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9c74bef2b7924847a38b5051c4df104c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.179341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c74bef2b7924847a38b5051c4df104c [ 617.308101] env[62109]: INFO nova.scheduler.client.report [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Deleted allocations for instance 39901fc8-8fc8-4812-936e-0ded3811d61c [ 617.314117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Expecting reply to msg 719dec19a0754a1483ea0315133f0f04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.326539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 719dec19a0754a1483ea0315133f0f04 [ 617.366521] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Successfully created port: cbcafabe-4af3-498d-b675-7d4cb156228f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.382422] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 617.384162] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 0f014303d5db4b95ae490b3c48ce5dc3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.440070] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f014303d5db4b95ae490b3c48ce5dc3 [ 617.449037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.527909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquiring lock "6f8e35f3-4b35-449c-9e60-1e0624f41cd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.527909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "6f8e35f3-4b35-449c-9e60-1e0624f41cd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.652398] env[62109]: DEBUG nova.network.neutron [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.678227] env[62109]: DEBUG nova.network.neutron [-] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.678227] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd56ecfeef4844b889cd9714335cf59b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.684999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd56ecfeef4844b889cd9714335cf59b [ 617.777302] env[62109]: DEBUG nova.network.neutron [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.777803] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] Expecting reply to msg 528f5eb8b44d46c8bdb88d9d6507b63e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.785488] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 528f5eb8b44d46c8bdb88d9d6507b63e [ 617.823255] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0393855-6a43-4693-ac68-c8f9bbb990a2 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993 tempest-FloatingIPsAssociationNegativeTestJSON-1419858993-project-member] Lock "39901fc8-8fc8-4812-936e-0ded3811d61c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.827010] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 3c79093a3c6f4ebaae83b8383eb14d77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.841120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c79093a3c6f4ebaae83b8383eb14d77 [ 617.870928] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97298031-3238-4d26-ae48-db5da0b516ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.877825] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aeebce-529e-4c6e-9254-875374be5e10 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.917110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 7108c468ebd64b49bffbd00c54947683 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.929254] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c1ecd7-ac77-4ba6-aca5-84b32d377ddc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.937023] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8d2b56-8661-4f2a-9837-17bffccce83d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.951281] env[62109]: DEBUG nova.compute.provider_tree [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.952842] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 2d8bf02e03474b3da6012073d799ef39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 617.957892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7108c468ebd64b49bffbd00c54947683 [ 617.959507] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d8bf02e03474b3da6012073d799ef39 [ 618.182196] env[62109]: INFO nova.compute.manager [-] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Took 1.03 seconds to deallocate network for instance. [ 618.183655] env[62109]: DEBUG nova.compute.claims [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 618.183830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.283117] env[62109]: DEBUG oslo_concurrency.lockutils [req-6b64b74a-3ddd-4393-868a-5952e8921278 req-3fad9883-4f2e-4e5d-80d8-5a8f57ce762d service nova] Releasing lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.330050] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 618.331914] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 12057751ef3a48a7933acbab9bc94f27 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 618.378950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12057751ef3a48a7933acbab9bc94f27 [ 618.429517] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 618.456750] env[62109]: DEBUG nova.scheduler.client.report [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.459487] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 3ec5daaac1ff40948a6b2f9e22aaf46c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 618.464296] env[62109]: DEBUG nova.compute.manager [req-e68768a0-48eb-44b6-9428-e80adfab7e37 req-93c6a860-5158-4c1e-8a9d-4f9960d51dec service nova] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Received event network-vif-deleted-a786577c-7bcb-4451-83d7-073ce6e077ed {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 618.469629] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.469771] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.469985] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.470248] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.470391] env[62109]: DEBUG 
nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.470579] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.470830] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.471019] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.471222] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.471445] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.471694] env[62109]: DEBUG nova.virt.hardware [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.472878] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c24f94-92e4-4b55-9e85-90ca0edc83c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.483929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ec5daaac1ff40948a6b2f9e22aaf46c [ 618.485536] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2e00e6-8aed-4237-a208-6a8cf20dc795 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.850794] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.865615] env[62109]: ERROR nova.compute.manager [None 
req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. [ 618.865615] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.865615] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.865615] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.865615] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.865615] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.865615] env[62109]: ERROR nova.compute.manager raise self.value [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.865615] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.865615] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.865615] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.866163] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.866163] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.866163] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. 
[ 618.866163] env[62109]: ERROR nova.compute.manager [ 618.866163] env[62109]: Traceback (most recent call last): [ 618.866163] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.866163] env[62109]: listener.cb(fileno) [ 618.866163] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.866163] env[62109]: result = function(*args, **kwargs) [ 618.866163] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.866163] env[62109]: return func(*args, **kwargs) [ 618.866163] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.866163] env[62109]: raise e [ 618.866163] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.866163] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 618.866163] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.866163] env[62109]: created_port_ids = self._update_ports_for_instance( [ 618.866163] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.866163] env[62109]: with excutils.save_and_reraise_exception(): [ 618.866163] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.866163] env[62109]: self.force_reraise() [ 618.866163] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.866163] env[62109]: raise self.value [ 618.866163] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.866163] env[62109]: updated_port = self._update_port( [ 618.866163] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.866163] env[62109]: _ensure_no_port_binding_failure(port) [ 618.866163] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.866163] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.867070] env[62109]: nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. [ 618.867070] env[62109]: Removing descriptor: 16 [ 618.867070] env[62109]: ERROR nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. 
[ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Traceback (most recent call last): [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] yield resources [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.driver.spawn(context, instance, image_meta, [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.867070] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] vm_ref = self.build_virtual_machine(instance, [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] for vif in network_info: [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self._sync_wrapper(fn, *args, **kwargs) [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.wait() [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self[:] = self._gt.wait() [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self._exit_event.wait() [ 618.867444] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.867947] env[62109]: ERROR 
nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] result = hub.switch() [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self.greenlet.switch() [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] result = function(*args, **kwargs) [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return func(*args, **kwargs) [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise e [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] nwinfo = self.network_api.allocate_for_instance( [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.867947] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] created_port_ids = self._update_ports_for_instance( [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] with excutils.save_and_reraise_exception(): [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.force_reraise() [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise self.value [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] updated_port = self._update_port( [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.868367] 
env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] _ensure_no_port_binding_failure(port) [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.868367] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise exception.PortBindingFailed(port_id=port['id']) [ 618.868702] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. [ 618.868702] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] [ 618.868702] env[62109]: INFO nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Terminating instance [ 618.869153] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.870154] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquired lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.870333] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.870771] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 50da39abe0464d40b06898dd91dc1148 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 618.878110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50da39abe0464d40b06898dd91dc1148 [ 618.964276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.964813] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 618.966675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 1c8e2afeed1540ce9a01eb06f07f77d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 618.967691] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.821s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.969923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg f36dff3e863644ed913a60730296c270 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 619.000399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "c9a6d28b-52f8-4636-886a-c74f0900e761" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.000634] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "c9a6d28b-52f8-4636-886a-c74f0900e761" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.014817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f36dff3e863644ed913a60730296c270 [ 619.016688] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c8e2afeed1540ce9a01eb06f07f77d9 [ 619.042403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.042660] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.080354] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 
tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "9f77d364-928f-4595-9253-8bb216b9215b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.080606] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "9f77d364-928f-4595-9253-8bb216b9215b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.393266] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.472831] env[62109]: DEBUG nova.compute.utils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.473541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 1a40b77dde964e268b25de6d23575a67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 619.477561] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 619.478018] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.483402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a40b77dde964e268b25de6d23575a67 [ 619.506863] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.507493] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 34bcc086708b4e9ca848c6a911e4b489 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 619.516037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34bcc086708b4e9ca848c6a911e4b489 [ 619.585194] env[62109]: DEBUG nova.policy [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23ccfeaefb6445009a1e73e9e8c5d07b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a15f084eff34d2b840889a457bdaeb3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 619.930936] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c5aac6-41eb-4d2b-a184-584cefbb0faf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.940645] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f6f136-7a43-4bf9-9277-705db72cbedd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.985271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a32c47-ecf4-4fb1-9a66-9e9e268c8ebd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.985271] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 619.985271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg e7c22bc52a424c1aaca3c25f07422ead in queue reply_7522b64acfeb4981b1f36928b040d568 [ 619.985271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21a234c-64e4-4f1e-9b6c-bdb404c20cd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.998149] env[62109]: DEBUG nova.compute.provider_tree [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.004887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 35aa9e9ba44e48f599baee8caa949cf6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.009776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35aa9e9ba44e48f599baee8caa949cf6 [ 620.017010] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Releasing lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.017443] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 620.017631] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 620.018426] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47d51be7-7c6c-4a09-a155-23b700ab8821 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.030361] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec47f87-0991-4c0f-8aab-5022ba7ea633 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.042588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7c22bc52a424c1aaca3c25f07422ead [ 620.054199] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f4a5c62-85f1-47ee-b702-1785bfe62f48 could not be found. [ 620.054468] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 620.054674] env[62109]: INFO nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Took 0.04 seconds to destroy the instance on the hypervisor. [ 620.054927] env[62109]: DEBUG oslo.service.loopingcall [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.055155] env[62109]: DEBUG nova.compute.manager [-] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.055250] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 620.080317] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.081013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ccc2af95cbe04ad3b740bbbca8ec2802 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.088459] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccc2af95cbe04ad3b740bbbca8ec2802 [ 620.319168] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Successfully created port: 1d761917-ebce-43d1-b26b-0773dc004f35 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.485444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 0a54dc8bc5034167b2adf64c078e0cc2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.502935] env[62109]: DEBUG nova.scheduler.client.report [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.505599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 02926e167fc04405b50a2e840382c774 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.520481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02926e167fc04405b50a2e840382c774 [ 620.523913] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a54dc8bc5034167b2adf64c078e0cc2 [ 620.582935] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.583400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d979c92c0f0a469e9a32ea87ccf390c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.593013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d979c92c0f0a469e9a32ea87ccf390c2 [ 620.715827] env[62109]: DEBUG nova.compute.manager [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Received event network-changed-cbcafabe-4af3-498d-b675-7d4cb156228f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 620.716065] env[62109]: DEBUG nova.compute.manager [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 
5f4a5c62-85f1-47ee-b702-1785bfe62f48] Refreshing instance network info cache due to event network-changed-cbcafabe-4af3-498d-b675-7d4cb156228f. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 620.716281] env[62109]: DEBUG oslo_concurrency.lockutils [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] Acquiring lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.716417] env[62109]: DEBUG oslo_concurrency.lockutils [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] Acquired lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.716569] env[62109]: DEBUG nova.network.neutron [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Refreshing network info cache for port cbcafabe-4af3-498d-b675-7d4cb156228f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 620.716983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] Expecting reply to msg 46f1227ac2e54a6286b57b1ee35f1d39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 620.724252] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46f1227ac2e54a6286b57b1ee35f1d39 [ 620.989459] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 621.012051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.042s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.012051] env[62109]: ERROR nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. 
[ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Traceback (most recent call last): [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.driver.spawn(context, instance, image_meta, [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.012051] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] vm_ref = self.build_virtual_machine(instance, [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] for vif in network_info: [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self._sync_wrapper(fn, *args, **kwargs) [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.wait() [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self[:] = self._gt.wait() [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self._exit_event.wait() [ 621.012383] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] result = hub.switch() [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return self.greenlet.switch() [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] result = function(*args, **kwargs) [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] return func(*args, **kwargs) [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise e [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] nwinfo = self.network_api.allocate_for_instance( [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.012660] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] created_port_ids = self._update_ports_for_instance( [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] with excutils.save_and_reraise_exception(): [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] self.force_reraise() [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise self.value [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] updated_port = self._update_port( [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] _ensure_no_port_binding_failure(port) [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 621.012944] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] raise exception.PortBindingFailed(port_id=port['id']) [ 621.013417] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] nova.exception.PortBindingFailed: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. [ 621.013417] env[62109]: ERROR nova.compute.manager [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] [ 621.013417] env[62109]: DEBUG nova.compute.utils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 621.013417] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.013417] env[62109]: DEBUG nova.objects.instance [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lazy-loading 'resources' on Instance uuid 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 621.013417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 30b604a3326047a9af211671fcc466d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.014230] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Build of instance d91c5dae-4ece-4718-a16b-534729f7ba49 was re-scheduled: Binding failed for port f8b581d8-499a-4fb9-841d-9a1f886b5fda, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.014580] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.015618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquiring lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.015618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Acquired lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.015618] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 621.015618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg b9fdc128251045c098bb4563d008c1e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.018491] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.018491] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.018491] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Image limits 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.018652] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.018792] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.018962] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.019175] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.019337] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.019482] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.019639] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.019804] env[62109]: DEBUG nova.virt.hardware [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.020826] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64a5076-dd36-476c-a566-fae2b499a1d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.023606] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30b604a3326047a9af211671fcc466d8 [ 621.024098] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9fdc128251045c098bb4563d008c1e5 [ 621.034764] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ea54d2-aef4-4d52-83fa-3382b7de58cc 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.084265] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.086685] env[62109]: INFO nova.compute.manager [-] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Took 1.03 seconds to deallocate network for instance. [ 621.089787] env[62109]: DEBUG nova.compute.claims [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 621.089787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.210600] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.210600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 8e428e499a774daa9c226d580bc630a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.226273] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e428e499a774daa9c226d580bc630a7 [ 621.244397] env[62109]: DEBUG nova.network.neutron [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.377420] env[62109]: DEBUG nova.compute.manager [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Received event network-changed-1d761917-ebce-43d1-b26b-0773dc004f35 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 621.377420] env[62109]: DEBUG nova.compute.manager [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Refreshing instance network info cache due to event network-changed-1d761917-ebce-43d1-b26b-0773dc004f35. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 621.377420] env[62109]: DEBUG oslo_concurrency.lockutils [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] Acquiring lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.377420] env[62109]: DEBUG oslo_concurrency.lockutils [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] Acquired lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.377420] env[62109]: DEBUG nova.network.neutron [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Refreshing network info cache for port 1d761917-ebce-43d1-b26b-0773dc004f35 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 621.377558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] Expecting reply to msg 585725cbf5b2463e8d79f82d9b763d72 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.377558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 585725cbf5b2463e8d79f82d9b763d72 [ 621.390763] env[62109]: DEBUG nova.network.neutron [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.390763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] Expecting reply to msg 9835cb50b49248739e9a012a6bff98ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.398178] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9835cb50b49248739e9a012a6bff98ba [ 621.712672] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Releasing lock "refresh_cache-d91c5dae-4ece-4718-a16b-534729f7ba49" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.712966] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 621.713117] env[62109]: DEBUG nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 621.713226] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 621.751461] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.752113] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 0e4469684caf4ecfabafaac3676b341c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 621.769158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e4469684caf4ecfabafaac3676b341c [ 621.795907] env[62109]: ERROR nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. 
[ 621.795907] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.795907] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.795907] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.795907] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.795907] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.795907] env[62109]: ERROR nova.compute.manager raise self.value [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.795907] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 621.795907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.795907] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 621.796504] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.796504] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 621.796504] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. 
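The tracebacks above all terminate in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron returns the updated port. A self-contained sketch of that kind of check follows; the 'binding_failed' value for binding:vif_type is an assumption based on Neutron's port-binding convention, and the class and function here are illustrative rather than Nova's code:

```python
# Hedged sketch of the check behind the PortBindingFailed tracebacks above:
# after Neutron updates a port, the returned binding is inspected and a
# failed binding is turned into an exception.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            f"logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Assumption: Neutron reports a failed binding via binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# A port dict shaped like Neutron's response would trigger the same error
# text seen for port 1d761917-... above.
port = {'id': '1d761917-ebce-43d1-b26b-0773dc004f35',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```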
[ 621.796504] env[62109]: ERROR nova.compute.manager [ 621.796504] env[62109]: Traceback (most recent call last): [ 621.796504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 621.796504] env[62109]: listener.cb(fileno) [ 621.796504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.796504] env[62109]: result = function(*args, **kwargs) [ 621.796504] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.796504] env[62109]: return func(*args, **kwargs) [ 621.796504] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.796504] env[62109]: raise e [ 621.796504] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.796504] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 621.796504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.796504] env[62109]: created_port_ids = self._update_ports_for_instance( [ 621.796504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.796504] env[62109]: with excutils.save_and_reraise_exception(): [ 621.796504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.796504] env[62109]: self.force_reraise() [ 621.796504] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.796504] env[62109]: raise self.value [ 621.796504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.796504] env[62109]: updated_port = self._update_port( [ 621.796504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.796504] env[62109]: _ensure_no_port_binding_failure(port) [ 621.796504] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.796504] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 621.797294] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. [ 621.797294] env[62109]: Removing descriptor: 19 [ 621.797294] env[62109]: ERROR nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. 
[ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Traceback (most recent call last): [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] yield resources [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.driver.spawn(context, instance, image_meta, [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.797294] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] vm_ref = self.build_virtual_machine(instance, [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] for vif in network_info: [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self._sync_wrapper(fn, *args, **kwargs) [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.wait() [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self[:] = self._gt.wait() [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self._exit_event.wait() [ 621.797629] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.798036] env[62109]: ERROR 
nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] result = hub.switch() [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self.greenlet.switch() [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] result = function(*args, **kwargs) [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return func(*args, **kwargs) [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise e [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] nwinfo = self.network_api.allocate_for_instance( [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.798036] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] created_port_ids = self._update_ports_for_instance( [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] with excutils.save_and_reraise_exception(): [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.force_reraise() [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise self.value [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] updated_port = self._update_port( [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.798405] 
env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] _ensure_no_port_binding_failure(port) [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.798405] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise exception.PortBindingFailed(port_id=port['id']) [ 621.798709] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. [ 621.798709] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] [ 621.798709] env[62109]: INFO nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Terminating instance [ 621.802536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.891254] env[62109]: DEBUG oslo_concurrency.lockutils [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] Releasing lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.891504] env[62109]: DEBUG nova.compute.manager [req-0694fba9-f052-419c-9dde-c6516ae27f18 req-ebb17ede-5d86-4bd4-b3d1-d312ae1b5288 service nova] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Received event network-vif-deleted-cbcafabe-4af3-498d-b675-7d4cb156228f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 621.892928] env[62109]: DEBUG nova.network.neutron [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.972874] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71247754-2964-433d-8fb2-b2d00dd46c08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.981646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fce4c86-c6f9-41fb-8795-14b56c414192 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.985593] env[62109]: DEBUG nova.network.neutron [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.986028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] Expecting reply to msg 033a48352d0d43b397f5f526733cc85a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.014740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 033a48352d0d43b397f5f526733cc85a [ 622.016314] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfadba8-5f12-4c84-86e0-98ae77eeb26f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.024612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4613d16a-8482-4a42-ae20-68b5b956859e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.040880] env[62109]: DEBUG nova.compute.provider_tree [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.041342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 2e62925bf68249198c01038864361a16 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.051967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e62925bf68249198c01038864361a16 [ 622.254665] env[62109]: DEBUG nova.network.neutron [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.255195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 478b3929867b4224bcb664f157b4b9cf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.264759] env[62109]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 478b3929867b4224bcb664f157b4b9cf [ 622.488208] env[62109]: DEBUG oslo_concurrency.lockutils [req-89bca210-d135-4d9e-96e6-60b11205c213 req-30d82fab-ac91-41ee-b86d-90ebf00887b1 service nova] Releasing lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.488488] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquired lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.488671] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.489154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 9b2fb824fb524bc99683dd7cc412388e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.498518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b2fb824fb524bc99683dd7cc412388e [ 622.544084] env[62109]: DEBUG nova.scheduler.client.report [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 622.546538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg ae9db6cbbafa49f198f6dadda3b39828 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.557993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae9db6cbbafa49f198f6dadda3b39828 [ 622.759293] env[62109]: INFO nova.compute.manager [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] [instance: d91c5dae-4ece-4718-a16b-534729f7ba49] Took 1.05 seconds to deallocate network for instance. 
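The scheduler report entries above log "Inventory has not changed for provider ..." alongside per-resource-class records for VCPU, MEMORY_MB and DISK_GB: an update is only sent to Placement when the newly computed inventory differs from the cached view. A rough sketch of that comparison, reusing the numbers from the log and an illustrative helper name (not Nova's report client):

```python
# Rough sketch of the "Inventory has not changed" decision logged above.
# changed_resource_classes is an illustrative helper; the records mirror
# the VCPU/MEMORY_MB/DISK_GB data in the log.

def changed_resource_classes(cached: dict, reported: dict) -> list:
    """Resource classes whose inventory records differ between the two views."""
    classes = set(cached) | set(reported)
    return sorted(rc for rc in classes if cached.get(rc) != reported.get(rc))


cached = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124,
                'step_size': 1, 'allocation_ratio': 1.0},
}

# The compute node reports identical data, so nothing is pushed to Placement
# and the log simply notes that the inventory is unchanged.
if not changed_resource_classes(cached, {k: dict(v) for k, v in cached.items()}):
    print("Inventory has not changed; skipping update")
```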
[ 622.761186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg 4e114a29a5d644dbbd7ec64060483f4c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 622.812774] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e114a29a5d644dbbd7ec64060483f4c [ 623.009156] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.049407] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.052522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.072s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.056723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 2eb090fa04c24b29bd1c462a4ba5831a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.086832] env[62109]: INFO nova.scheduler.client.report [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Deleted allocations for instance 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55 [ 623.092370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg a3d46333fc084abc82a0e4a27c437fa4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.124612] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.125237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 68475c91836c4e8b96ae767becd9cd6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.126664] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eb090fa04c24b29bd1c462a4ba5831a [ 623.136000] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 68475c91836c4e8b96ae767becd9cd6b [ 623.160881] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3d46333fc084abc82a0e4a27c437fa4 [ 623.265431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg abda4112dae445ef8532809b03323b42 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.302913] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abda4112dae445ef8532809b03323b42 [ 623.477611] env[62109]: DEBUG nova.compute.manager [req-79b8a073-24b9-4e54-a410-92b0ffdb6eed req-13743d5c-ea07-4e95-aa43-a1f2657d7259 service nova] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Received event network-vif-deleted-1d761917-ebce-43d1-b26b-0773dc004f35 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 623.597240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Lock "9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.695s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.597588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-98885000-fa81-443f-832e-b86f40c14155 tempest-ServerDiagnosticsV248Test-1910275868 tempest-ServerDiagnosticsV248Test-1910275868-project-member] Expecting reply to msg 44bf6bedb562453588348637e6c55b27 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.611336] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44bf6bedb562453588348637e6c55b27 [ 623.628800] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Releasing lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.629350] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 623.629549] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 623.632435] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-922df2d0-c902-44d6-a508-caf2fb242cd8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.641640] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db652283-598b-4a0b-8923-04f37b32e938 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.666391] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56f9bb28-2770-46aa-9d95-f60cdeae0967 could not be found. [ 623.666614] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 623.666787] env[62109]: INFO nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Took 0.04 seconds to destroy the instance on the hypervisor. [ 623.667047] env[62109]: DEBUG oslo.service.loopingcall [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.669692] env[62109]: DEBUG nova.compute.manager [-] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.669792] env[62109]: DEBUG nova.network.neutron [-] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.702179] env[62109]: DEBUG nova.network.neutron [-] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.702842] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 850cc1c1cb6a454c8adda17511ac8595 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.709799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 850cc1c1cb6a454c8adda17511ac8595 [ 623.810839] env[62109]: INFO nova.scheduler.client.report [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Deleted allocations for instance d91c5dae-4ece-4718-a16b-534729f7ba49 [ 623.825031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Expecting reply to msg e799e52a54724d95914ebfc8bb53ea87 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 623.836888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e799e52a54724d95914ebfc8bb53ea87 [ 624.012658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e44744-0de8-49a8-a88c-8ff01d85eaa2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.022323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fe836b-4fbb-455d-9bc3-63e80adc84e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.061439] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ae3eec-7db6-4d5e-9cb3-431baae36f52 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.069554] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b4350e-9bbb-4c50-a884-23bd2eff8efe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.085735] env[62109]: DEBUG nova.compute.provider_tree [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.086001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg e033e91fb1ea48a6a806beea550113cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 624.097260] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e033e91fb1ea48a6a806beea550113cd [ 624.205545] env[62109]: DEBUG nova.network.neutron [-] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.206056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 07c8961b629b4521a744462607bae67e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 624.216952] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 07c8961b629b4521a744462607bae67e [ 624.327556] env[62109]: DEBUG oslo_concurrency.lockutils [None req-54275bfb-afdc-49f7-a6da-f87693fe4de7 tempest-DeleteServersAdminTestJSON-911911890 tempest-DeleteServersAdminTestJSON-911911890-project-member] Lock "d91c5dae-4ece-4718-a16b-534729f7ba49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.658s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.328241] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg b1b9825d53b64a5ba719ceb2c884dba3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 624.345243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1b9825d53b64a5ba719ceb2c884dba3 [ 624.589480] env[62109]: DEBUG nova.scheduler.client.report [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.591758] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 3cdeb6ef4934461cb9f7ce157f379b13 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 624.607371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cdeb6ef4934461cb9f7ce157f379b13 [ 624.709659] env[62109]: INFO nova.compute.manager [-] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Took 1.04 seconds to deallocate network for instance. [ 624.712406] env[62109]: DEBUG nova.compute.claims [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 624.712582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.829915] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 624.831692] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 222b50a8c8384392acbcbcd94c4f1091 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 624.878091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 222b50a8c8384392acbcbcd94c4f1091 [ 625.094268] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.042s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.094772] env[62109]: ERROR nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Traceback (most recent call last): [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.driver.spawn(context, instance, image_meta, [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] vm_ref = self.build_virtual_machine(instance, [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.094772] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] for vif in network_info: [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self._sync_wrapper(fn, *args, **kwargs) [ 625.095075] env[62109]: ERROR 
nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.wait() [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self[:] = self._gt.wait() [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self._exit_event.wait() [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] result = hub.switch() [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.095075] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return self.greenlet.switch() [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] result = function(*args, **kwargs) [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] return func(*args, **kwargs) [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise e [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] nwinfo = self.network_api.allocate_for_instance( [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] created_port_ids = self._update_ports_for_instance( [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.095419] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] with excutils.save_and_reraise_exception(): [ 625.095419] env[62109]: ERROR nova.compute.manager 
[instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] self.force_reraise() [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise self.value [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] updated_port = self._update_port( [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] _ensure_no_port_binding_failure(port) [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] raise exception.PortBindingFailed(port_id=port['id']) [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] nova.exception.PortBindingFailed: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. [ 625.095720] env[62109]: ERROR nova.compute.manager [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] [ 625.096065] env[62109]: DEBUG nova.compute.utils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 625.096857] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.056s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.098673] env[62109]: INFO nova.compute.claims [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.100747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 6af2cd196f4943ff81bf1117b23efa6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 625.102880] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Build of instance 2fb7c1e4-d756-4528-914e-b924c5a3be38 was re-scheduled: Binding failed for port 398e9cea-37a5-4265-b56d-a19344b26784, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 625.103497] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 625.103548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquiring lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.103691] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Acquired lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.104095] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.104785] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 5f4469afefcf493a841f2013a61e685b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 625.112892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 5f4469afefcf493a841f2013a61e685b [ 625.179569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6af2cd196f4943ff81bf1117b23efa6e [ 625.353721] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.608115] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 71fa0518c06944a885a2abb6f2c66d84 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 625.622267] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71fa0518c06944a885a2abb6f2c66d84 [ 625.698584] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.980264] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.980805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg d30d6b4fb2cb41128dcbf12502baa61c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 625.990345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d30d6b4fb2cb41128dcbf12502baa61c [ 626.484368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Releasing lock "refresh_cache-2fb7c1e4-d756-4528-914e-b924c5a3be38" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.484610] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 626.484782] env[62109]: DEBUG nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.484951] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 626.517991] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.518613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 34e4f3c1c9784dbfbae1565ce2be41ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 626.527214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34e4f3c1c9784dbfbae1565ce2be41ce [ 626.580165] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58a6182-a06e-4245-b43b-3ce0b38ed0db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.588054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218105a8-76ce-48e0-ba67-ddd783a9e064 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.617263] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f224fd1e-f75f-40c1-a5d5-16d78e02caa1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.624446] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b22ee0-dc5c-415d-9c62-f4cd7f9553a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.637517] env[62109]: DEBUG nova.compute.provider_tree [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.638047] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 27c1d3f374a444d094837dfa24f54064 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 626.644857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27c1d3f374a444d094837dfa24f54064 [ 
626.917744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2367e5f8708d4a46bee820b8369858a6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 626.936556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2367e5f8708d4a46bee820b8369858a6 [ 627.021437] env[62109]: DEBUG nova.network.neutron [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.021965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 60c7dc89848d4cc99d76657aa86df7ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 627.032677] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60c7dc89848d4cc99d76657aa86df7ad [ 627.141023] env[62109]: DEBUG nova.scheduler.client.report [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.143596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 42d285d2815140f483d25bc196ff97ed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 627.156908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42d285d2815140f483d25bc196ff97ed [ 627.525229] env[62109]: INFO nova.compute.manager [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] [instance: 2fb7c1e4-d756-4528-914e-b924c5a3be38] Took 1.04 seconds to deallocate network for instance. 
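The "Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: ..." records above carry the full inventory payload the resource tracker reports to placement. As a rough illustration of how those fields combine (a sketch using the standard placement semantics, capacity = (total - reserved) * allocation_ratio, not code taken from Nova), the effective capacity per resource class works out like this:

# Illustrative only, not Nova code: combine the inventory fields reported above
# the way placement does, assuming the usual semantics
#   capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: effective capacity {capacity:.0f}, "
          f"max per allocation {inv['max_unit']}")

With the numbers above this gives 192 VCPU (48 * 4.0 overcommit), 196078 MB of RAM and 400 GB of disk exposed to the scheduler, which is why the repeated "Inventory has not changed" reports are cheap no-ops.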
[ 627.527269] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 51ade1e210b04cde828c8d41f047b5e3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 627.592290] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51ade1e210b04cde828c8d41f047b5e3 [ 627.646426] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.646964] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 627.648919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg c2546aeb3d5544afae55d29f888b272a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 627.650343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.625s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.652736] env[62109]: INFO nova.compute.claims [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.654659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg b4721dbf378843bcbf8f1ebdc6d0540c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 627.684310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "832c9ce1-6344-485a-a9ef-6950d1c78ef9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.684310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "832c9ce1-6344-485a-a9ef-6950d1c78ef9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.715537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4721dbf378843bcbf8f1ebdc6d0540c [ 627.717457] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2546aeb3d5544afae55d29f888b272a [ 628.033107] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg f6c5fc00bea1417f875138dc7cb8da04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 628.066473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6c5fc00bea1417f875138dc7cb8da04 [ 628.158638] env[62109]: DEBUG nova.compute.utils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.159153] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 92fe6860ba0241b8a71aacb0a0188c40 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 628.161534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 045f81db9e684e3fb51133503855ac4f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 628.162655] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 628.162830] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 628.170396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 045f81db9e684e3fb51133503855ac4f [ 628.180748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92fe6860ba0241b8a71aacb0a0188c40 [ 628.316331] env[62109]: DEBUG nova.policy [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fe43f98ba574c19847bad2d5ed221c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ebd77f41f294e62bf81eacf4e449d63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 628.569767] env[62109]: INFO nova.scheduler.client.report [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Deleted allocations for instance 2fb7c1e4-d756-4528-914e-b924c5a3be38 [ 628.582190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Expecting reply to msg 2d9049245611408bb7933684928eb84b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 628.594752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d9049245611408bb7933684928eb84b [ 628.663244] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 628.665931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 5c1630e64a294031aef2ac027d50e732 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 628.723829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c1630e64a294031aef2ac027d50e732 [ 628.871552] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Successfully created port: 4757bd66-fea5-408a-b5a7-7b719b899f50 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.081567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a2b2b44c-46a1-45bb-bf84-c88930f28c90 tempest-ImagesNegativeTestJSON-1745274774 tempest-ImagesNegativeTestJSON-1745274774-project-member] Lock "2fb7c1e4-d756-4528-914e-b924c5a3be38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.005s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.081567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg a911384071ff47f1893014d81879dd67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 629.093105] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a911384071ff47f1893014d81879dd67 [ 629.118271] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d236b9-6a8e-4178-ab45-314625d7d8cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.128170] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e376e2-929b-424b-816f-7fc758bd68ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.172399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 1eb5a946a8f24fefba7dc474dbc07109 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 629.174688] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7eaea8f-bbed-4f32-9976-1e723fbd69dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.183831] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52f4d87-7bd3-4e03-9fa7-bda92fc960bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.200924] env[62109]: DEBUG nova.compute.provider_tree [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Inventory has not changed in ProviderTree for provider: 
5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.201080] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 564c5a15537f40d9a0fadfda9b0ed718 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 629.211618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 564c5a15537f40d9a0fadfda9b0ed718 [ 629.238832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquiring lock "3d99c7df-b031-4187-988c-f642f79073d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.239081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "3d99c7df-b031-4187-988c-f642f79073d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.239595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eb5a946a8f24fefba7dc474dbc07109 [ 629.583543] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 629.585487] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg c0a257157961449397d3d9b42575a4be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 629.624381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0a257157961449397d3d9b42575a4be [ 629.675789] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 629.700109] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 629.700347] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 629.700495] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.700670] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 629.701015] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.701212] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 629.701420] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 629.701573] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 629.701730] env[62109]: DEBUG nova.virt.hardware [None 
req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 629.701879] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 629.702039] env[62109]: DEBUG nova.virt.hardware [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.702864] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b49ed1-912b-49a7-8e9a-e7afa4967ab1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.706090] env[62109]: DEBUG nova.scheduler.client.report [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 629.708473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 7ccdd257f2ee4ddaa2bbb55353eb0b3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 629.715029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7904fa19-b5db-42b8-9eb7-fb5ab1d648b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.720058] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ccdd257f2ee4ddaa2bbb55353eb0b3f [ 630.105782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.211166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.211690] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 630.213392] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg aeafaa14c8ae4bda890d36c0ed275c57 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.216685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.333s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.218044] env[62109]: INFO nova.compute.claims [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.219677] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 1cfe146171ae4d4f979a2b92f0161f1e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.221462] env[62109]: DEBUG nova.compute.manager [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Received event network-changed-4757bd66-fea5-408a-b5a7-7b719b899f50 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 630.221543] env[62109]: DEBUG nova.compute.manager [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Refreshing instance network info cache due to event network-changed-4757bd66-fea5-408a-b5a7-7b719b899f50. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 630.221744] env[62109]: DEBUG oslo_concurrency.lockutils [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] Acquiring lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.221873] env[62109]: DEBUG oslo_concurrency.lockutils [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] Acquired lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.222022] env[62109]: DEBUG nova.network.neutron [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Refreshing network info cache for port 4757bd66-fea5-408a-b5a7-7b719b899f50 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.222370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] Expecting reply to msg 6e39fd4d9c7040598e1047761c7dfabf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.223723] env[62109]: ERROR nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. [ 630.223723] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.223723] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 630.223723] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 630.223723] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.223723] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.223723] env[62109]: ERROR nova.compute.manager raise self.value [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 630.223723] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 630.223723] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.223723] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 630.224235] env[62109]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.224235] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 630.224235] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. [ 630.224235] env[62109]: ERROR nova.compute.manager [ 630.224235] env[62109]: Traceback (most recent call last): [ 630.224235] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 630.224235] env[62109]: listener.cb(fileno) [ 630.224235] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.224235] env[62109]: result = function(*args, **kwargs) [ 630.224235] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 630.224235] env[62109]: return func(*args, **kwargs) [ 630.224235] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 630.224235] env[62109]: raise e [ 630.224235] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.224235] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 630.224235] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 630.224235] env[62109]: created_port_ids = self._update_ports_for_instance( [ 630.224235] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 630.224235] env[62109]: with excutils.save_and_reraise_exception(): [ 630.224235] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.224235] env[62109]: self.force_reraise() [ 630.224235] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.224235] env[62109]: raise self.value [ 630.224235] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 630.224235] env[62109]: updated_port = self._update_port( [ 630.224235] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.224235] env[62109]: _ensure_no_port_binding_failure(port) [ 630.224235] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.224235] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 630.224967] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. [ 630.224967] env[62109]: Removing descriptor: 19 [ 630.225098] env[62109]: ERROR nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. 
[ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Traceback (most recent call last): [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] yield resources [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.driver.spawn(context, instance, image_meta, [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] vm_ref = self.build_virtual_machine(instance, [ 630.225098] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] for vif in network_info: [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self._sync_wrapper(fn, *args, **kwargs) [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.wait() [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self[:] = self._gt.wait() [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self._exit_event.wait() [ 630.225383] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 630.225383] env[62109]: ERROR 
nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] result = hub.switch() [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self.greenlet.switch() [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] result = function(*args, **kwargs) [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return func(*args, **kwargs) [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise e [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] nwinfo = self.network_api.allocate_for_instance( [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] created_port_ids = self._update_ports_for_instance( [ 630.225730] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] with excutils.save_and_reraise_exception(): [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.force_reraise() [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise self.value [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] updated_port = self._update_port( [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.226060] 
env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] _ensure_no_port_binding_failure(port) [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise exception.PortBindingFailed(port_id=port['id']) [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. [ 630.226060] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] [ 630.226365] env[62109]: INFO nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Terminating instance [ 630.227141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquiring lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.243172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e39fd4d9c7040598e1047761c7dfabf [ 630.258610] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeafaa14c8ae4bda890d36c0ed275c57 [ 630.266983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cfe146171ae4d4f979a2b92f0161f1e [ 630.726817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg ea8f0d4169f3436db19b627a84efa66f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.728626] env[62109]: DEBUG nova.compute.utils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 630.729195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg ed4b18e3bc5f4ba7a5311edbb4275948 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.731946] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 630.732124] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 630.740651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea8f0d4169f3436db19b627a84efa66f [ 630.741574] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed4b18e3bc5f4ba7a5311edbb4275948 [ 630.761882] env[62109]: DEBUG nova.network.neutron [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.816771] env[62109]: DEBUG nova.policy [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ca9c1043bfc469e8230e5c6bfc5f48a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b7ae63045184f9999aa13025fdc6b26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 630.864555] env[62109]: DEBUG nova.network.neutron [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.865044] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] Expecting reply to msg 0c1d8f1d4b3f4686a1858488a79d851e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 630.873998] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c1d8f1d4b3f4686a1858488a79d851e [ 631.207976] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Successfully created port: 0a638f3f-d212-4e30-8d09-d277b8a79f1f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.234973] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 631.236740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg b7a724517b75432c9ed33152c394b5ea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 631.291190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7a724517b75432c9ed33152c394b5ea [ 631.367064] env[62109]: DEBUG oslo_concurrency.lockutils [req-55cbfb67-9e05-47bc-a128-f298954d194e req-3fd8b0da-e7f7-48b4-ba8b-96c96640e7ba service nova] Releasing lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.367471] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquired lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.367908] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 631.368141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 2bbd56d4706a49968ab54e087ebce062 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 631.375270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bbd56d4706a49968ab54e087ebce062 [ 631.626735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd57d0b-bf39-4854-8fab-0096d9df7201 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.635622] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65518bca-8eb6-4fac-8680-7be6429b41c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.666464] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b3ce0c-2c2c-496e-9782-32c97ff4ec39 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.673571] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d591ab-1c4e-46be-9a60-565a70db7d0b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.688411] env[62109]: DEBUG nova.compute.provider_tree [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.689017] 
env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg d0b7e32d28d54aea8503dab968d1a28a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 631.695722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0b7e32d28d54aea8503dab968d1a28a [ 631.742626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg e686475aefb94688a9f590dcc3a3d4a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 631.786423] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e686475aefb94688a9f590dcc3a3d4a5 [ 631.953646] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.191576] env[62109]: DEBUG nova.scheduler.client.report [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.194089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg fdaa7eb301794dbd8cfd04cb39369db5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.197230] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.197661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg ec53d40badaa4c99b48221548fce5238 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.209730] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdaa7eb301794dbd8cfd04cb39369db5 [ 632.210338] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec53d40badaa4c99b48221548fce5238 [ 632.252534] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 
3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 632.279976] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.280280] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.280436] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.280608] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.280752] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.280894] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.282289] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.282522] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 632.282709] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.282884] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.283087] env[62109]: DEBUG nova.virt.hardware [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.283951] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b5c435-3105-4e59-80ce-9cd47a5494f0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.292427] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bed8e5-ba39-4512-a629-84be6755fbcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.430399] env[62109]: ERROR nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. 
[ 632.430399] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.430399] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.430399] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.430399] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.430399] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.430399] env[62109]: ERROR nova.compute.manager raise self.value [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.430399] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 632.430399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.430399] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 632.430843] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.430843] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 632.430843] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. 
[ 632.430843] env[62109]: ERROR nova.compute.manager [ 632.430843] env[62109]: Traceback (most recent call last): [ 632.430843] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 632.430843] env[62109]: listener.cb(fileno) [ 632.430843] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.430843] env[62109]: result = function(*args, **kwargs) [ 632.430843] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.430843] env[62109]: return func(*args, **kwargs) [ 632.430843] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.430843] env[62109]: raise e [ 632.430843] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.430843] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 632.430843] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.430843] env[62109]: created_port_ids = self._update_ports_for_instance( [ 632.430843] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.430843] env[62109]: with excutils.save_and_reraise_exception(): [ 632.430843] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.430843] env[62109]: self.force_reraise() [ 632.430843] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.430843] env[62109]: raise self.value [ 632.430843] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.430843] env[62109]: updated_port = self._update_port( [ 632.430843] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.430843] env[62109]: _ensure_no_port_binding_failure(port) [ 632.430843] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.430843] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 632.431474] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. [ 632.431474] env[62109]: Removing descriptor: 16 [ 632.431474] env[62109]: ERROR nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. 
[ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Traceback (most recent call last): [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] yield resources [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.driver.spawn(context, instance, image_meta, [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.431474] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] vm_ref = self.build_virtual_machine(instance, [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] for vif in network_info: [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self._sync_wrapper(fn, *args, **kwargs) [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.wait() [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self[:] = self._gt.wait() [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self._exit_event.wait() [ 632.431762] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.432072] env[62109]: ERROR 
nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] result = hub.switch() [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self.greenlet.switch() [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] result = function(*args, **kwargs) [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return func(*args, **kwargs) [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise e [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] nwinfo = self.network_api.allocate_for_instance( [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.432072] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] created_port_ids = self._update_ports_for_instance( [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] with excutils.save_and_reraise_exception(): [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.force_reraise() [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise self.value [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] updated_port = self._update_port( [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.432376] 
env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] _ensure_no_port_binding_failure(port) [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.432376] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise exception.PortBindingFailed(port_id=port['id']) [ 632.432665] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. [ 632.432665] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] [ 632.432665] env[62109]: INFO nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Terminating instance [ 632.433954] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquiring lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.434104] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquired lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.434144] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.434553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 4917478fc6e140f6a4f6ca94eedac69a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.445676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4917478fc6e140f6a4f6ca94eedac69a [ 632.541133] env[62109]: DEBUG nova.compute.manager [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Received event network-changed-0a638f3f-d212-4e30-8d09-d277b8a79f1f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 632.541133] env[62109]: DEBUG nova.compute.manager [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Refreshing instance network info cache due to event network-changed-0a638f3f-d212-4e30-8d09-d277b8a79f1f. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 632.541133] env[62109]: DEBUG oslo_concurrency.lockutils [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] Acquiring lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.558729] env[62109]: DEBUG nova.compute.manager [req-bdcb5a5f-bbf4-484f-9737-6db3fa8d508d req-93d7cccb-79ff-46df-9144-7ff2783db987 service nova] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Received event network-vif-deleted-4757bd66-fea5-408a-b5a7-7b719b899f50 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 632.698171] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.698773] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 632.700524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 73596b473ab54769bcee2872ac62fb62 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.701533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.157s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.703231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 56193cb9f10e482fbbaddd4c4834a214 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.706704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Releasing lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.707101] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 632.707266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 632.707515] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f694cd8-d478-4e5a-a4ed-6ee9694550b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.718741] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a7482c-0e65-417c-811e-6d29aa38b392 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.742046] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 35411b03-ace3-40da-8c3e-3872ac003bd3 could not be found. [ 632.742289] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 632.742464] env[62109]: INFO nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 632.742703] env[62109]: DEBUG oslo.service.loopingcall [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 632.742917] env[62109]: DEBUG nova.compute.manager [-] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 632.743008] env[62109]: DEBUG nova.network.neutron [-] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 632.753323] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73596b473ab54769bcee2872ac62fb62 [ 632.754151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56193cb9f10e482fbbaddd4c4834a214 [ 632.763619] env[62109]: DEBUG nova.network.neutron [-] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.764275] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9ee885f2950e49a9844737e7e7077239 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 632.770390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ee885f2950e49a9844737e7e7077239 [ 632.978225] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.209427] env[62109]: DEBUG nova.compute.utils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 633.210165] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg d989607a6c864c4c832370a1a69c3041 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.211155] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 633.211317] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 633.223581] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d989607a6c864c4c832370a1a69c3041 [ 633.230944] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.231183] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 908883389fcd48869cfcd082a19424e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.246009] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 908883389fcd48869cfcd082a19424e7 [ 633.267654] env[62109]: DEBUG nova.network.neutron [-] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.268237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ed16b04aaf3a496b87825140f79d83a8 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 633.276450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed16b04aaf3a496b87825140f79d83a8 [ 633.310596] env[62109]: DEBUG nova.policy [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2857ea8202fc46b09da06c4ac904df95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f74de90347d44395b052d75738d6a065', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 633.626513] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd44bc8-ee9f-443e-b04e-c7c909810a89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.635534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c146612-12bf-4843-8f59-4ca5a7680cbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.664516] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0751f697-9393-48bc-a1e0-75c38b453913 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.679851] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a94c9d1-5863-46b8-889c-d851d1dcb679 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.701452] env[62109]: DEBUG nova.compute.provider_tree [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.702112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 8b657a6a7062485ab68776923519b54a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.708572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b657a6a7062485ab68776923519b54a [ 633.714873] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 633.717194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 31c3e5a4a9d64d00a14ba717658acd31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.734079] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Releasing lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.734478] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 633.734660] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 633.734975] env[62109]: DEBUG oslo_concurrency.lockutils [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] Acquired lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.735145] env[62109]: DEBUG nova.network.neutron [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Refreshing network info cache for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 633.735555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] Expecting reply to msg 86cff54782dc43f3b30bc0d54e2c936e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.736756] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e48b11d-fbcf-4100-a70b-b1b9209950bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.746797] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cfb15d-d5ab-4e14-af25-ca77daa696b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.758674] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86cff54782dc43f3b30bc0d54e2c936e [ 633.758674] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c3e5a4a9d64d00a14ba717658acd31 [ 633.769306] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance does not exist on 
backend: nova.exception.InstanceNotFound: Instance 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee could not be found. [ 633.769578] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 633.769755] env[62109]: INFO nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Took 0.04 seconds to destroy the instance on the hypervisor. [ 633.769993] env[62109]: DEBUG oslo.service.loopingcall [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.770201] env[62109]: DEBUG nova.compute.manager [-] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.770297] env[62109]: DEBUG nova.network.neutron [-] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 633.774245] env[62109]: INFO nova.compute.manager [-] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Took 1.03 seconds to deallocate network for instance. [ 633.775766] env[62109]: DEBUG nova.compute.claims [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 633.775941] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.785899] env[62109]: DEBUG nova.network.neutron [-] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.786373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cb4eda5a7fac4b1f9e6713adcf3c3929 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 633.796049] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb4eda5a7fac4b1f9e6713adcf3c3929 [ 634.043671] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Successfully created port: a8f53cf7-372e-482c-94e0-6ed3a6e4442a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.205085] env[62109]: DEBUG nova.scheduler.client.report [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 634.207926] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg b3e16f91827c4064811c953c202bcf15 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.221399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 5177f4407ee84ce598cb3c5f0dd1e11a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.230812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3e16f91827c4064811c953c202bcf15 [ 634.268499] env[62109]: DEBUG nova.network.neutron [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.281071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5177f4407ee84ce598cb3c5f0dd1e11a [ 634.287725] env[62109]: DEBUG nova.network.neutron [-] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.288215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fbf22130598c4570a49879bc1c48215d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.300415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf22130598c4570a49879bc1c48215d [ 634.451141] env[62109]: DEBUG nova.network.neutron [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.451663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] Expecting reply to msg 282badbc54d4479fa52c65657750267e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.464187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 282badbc54d4479fa52c65657750267e [ 634.711475] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.009s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.711781] env[62109]: ERROR nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. 
[ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Traceback (most recent call last): [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.driver.spawn(context, instance, image_meta, [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] vm_ref = self.build_virtual_machine(instance, [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.711781] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] for vif in network_info: [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self._sync_wrapper(fn, *args, **kwargs) [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.wait() [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self[:] = self._gt.wait() [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self._exit_event.wait() [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] result = hub.switch() [ 634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
634.712099] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return self.greenlet.switch() [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] result = function(*args, **kwargs) [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] return func(*args, **kwargs) [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise e [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] nwinfo = self.network_api.allocate_for_instance( [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] created_port_ids = self._update_ports_for_instance( [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] with excutils.save_and_reraise_exception(): [ 634.712399] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] self.force_reraise() [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise self.value [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] updated_port = self._update_port( [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] _ensure_no_port_binding_failure(port) [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] raise exception.PortBindingFailed(port_id=port['id']) [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] nova.exception.PortBindingFailed: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. [ 634.712682] env[62109]: ERROR nova.compute.manager [instance: f1239cdd-d1b3-4494-8204-0fe150737579] [ 634.712923] env[62109]: DEBUG nova.compute.utils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 634.719059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.265s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.719059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.719059] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 634.719059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.531s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.719059] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg afc5b939489248ada78000d33fddb925 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.719331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdc854e-b900-42ce-94fd-f79bf32d1e9d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.721629] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Build of instance f1239cdd-d1b3-4494-8204-0fe150737579 was re-scheduled: Binding failed for port 4f949fd6-9396-4f81-896d-140fe0cabbdf, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 634.722106] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 634.722327] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.722469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquired lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.722618] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.723032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg e0ce329b12ef4c649f9b3604405179c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 634.739372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0ce329b12ef4c649f9b3604405179c6 [ 634.744496] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 634.747905] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b699ded-8d5b-406f-86f3-97ff069f352b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.754982] env[62109]: DEBUG nova.compute.manager [req-4362e9c6-94ef-4564-944b-b66137fa3e91 req-5e5f471a-d14d-4c6e-b4c9-26838bee7d6a service nova] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Received event network-vif-deleted-0a638f3f-d212-4e30-8d09-d277b8a79f1f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 634.769274] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cb9725-fd40-4308-a3f6-eb4b6d50d7c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.772041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afc5b939489248ada78000d33fddb925 [ 634.779199] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 634.780518] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 634.780518] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.780518] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 634.780518] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.780518] env[62109]: DEBUG nova.virt.hardware [None 
req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 634.780686] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 634.780686] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 634.780686] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 634.780806] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 634.780968] env[62109]: DEBUG nova.virt.hardware [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 634.782037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca82590-ece8-42d4-a02a-070474114b05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.786929] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce9ae53-5ea3-4049-8c51-fb588f29e52a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.790204] env[62109]: INFO nova.compute.manager [-] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Took 1.02 seconds to deallocate network for instance. 
[ 634.795603] env[62109]: DEBUG nova.compute.claims [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 634.795603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.828862] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181671MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 634.828862] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.832496] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cbd4dc-a573-4f3a-9c3e-b6c45ab22533 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.954421] env[62109]: DEBUG oslo_concurrency.lockutils [req-f466914d-c8d0-453a-a9a1-edf156c6749d req-5a985a37-772a-440a-9fab-702c6aa877b7 service nova] Releasing lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.252045] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.357844] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.358684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 9f3e3743f9d94a9bbce474f767c36d70 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 635.369722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f3e3743f9d94a9bbce474f767c36d70 [ 635.695110] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f840827a-2b80-472b-b5b6-c684c7b4fa6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.708325] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de6dcb4-ce57-481d-bdb3-7b5ada291d90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.740524] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e7dca5-3afc-42e2-8e4e-78d22eeb0285 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.748200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15dcd26-b908-4774-9d94-e604b81db1a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.763317] env[62109]: DEBUG nova.compute.provider_tree [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.763836] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 930277453e5c490fa696a3ebde588cc2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 635.770709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 930277453e5c490fa696a3ebde588cc2 [ 635.829129] env[62109]: DEBUG nova.compute.manager [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Received event network-changed-a8f53cf7-372e-482c-94e0-6ed3a6e4442a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 635.829332] env[62109]: DEBUG nova.compute.manager [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Refreshing instance network info cache due to event network-changed-a8f53cf7-372e-482c-94e0-6ed3a6e4442a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 635.829549] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] Acquiring lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.829687] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] Acquired lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.829842] env[62109]: DEBUG nova.network.neutron [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Refreshing network info cache for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.830447] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] Expecting reply to msg 6d7aa117a4b94682a04aa504253767f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 635.838596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d7aa117a4b94682a04aa504253767f5 [ 635.861117] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Releasing lock "refresh_cache-f1239cdd-d1b3-4494-8204-0fe150737579" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.861405] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 635.861630] env[62109]: DEBUG nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.861730] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 635.879346] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.879955] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg ad1ff60df0c04db1908a00b46181a81c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 635.887000] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad1ff60df0c04db1908a00b46181a81c [ 635.916267] env[62109]: ERROR nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. [ 635.916267] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.916267] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.916267] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.916267] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.916267] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.916267] env[62109]: ERROR nova.compute.manager raise self.value [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.916267] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 635.916267] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.916267] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 635.916669] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.916669] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 635.916669] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. 
[ 635.916669] env[62109]: ERROR nova.compute.manager [ 635.916761] env[62109]: Traceback (most recent call last): [ 635.916789] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 635.916789] env[62109]: listener.cb(fileno) [ 635.916789] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.916789] env[62109]: result = function(*args, **kwargs) [ 635.916789] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.916789] env[62109]: return func(*args, **kwargs) [ 635.916789] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.916789] env[62109]: raise e [ 635.916789] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.916789] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 635.916789] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.916789] env[62109]: created_port_ids = self._update_ports_for_instance( [ 635.916789] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.916789] env[62109]: with excutils.save_and_reraise_exception(): [ 635.916789] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.916789] env[62109]: self.force_reraise() [ 635.916789] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.916789] env[62109]: raise self.value [ 635.916789] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.916789] env[62109]: updated_port = self._update_port( [ 635.916789] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.916789] env[62109]: _ensure_no_port_binding_failure(port) [ 635.916789] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.916789] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 635.917291] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. [ 635.917291] env[62109]: Removing descriptor: 19 [ 635.918541] env[62109]: ERROR nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. 
[ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Traceback (most recent call last): [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] yield resources [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.driver.spawn(context, instance, image_meta, [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] vm_ref = self.build_virtual_machine(instance, [ 635.918541] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] for vif in network_info: [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self._sync_wrapper(fn, *args, **kwargs) [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.wait() [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self[:] = self._gt.wait() [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self._exit_event.wait() [ 635.918810] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.918810] env[62109]: ERROR 
nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] result = hub.switch() [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self.greenlet.switch() [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] result = function(*args, **kwargs) [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return func(*args, **kwargs) [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise e [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] nwinfo = self.network_api.allocate_for_instance( [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] created_port_ids = self._update_ports_for_instance( [ 635.919128] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] with excutils.save_and_reraise_exception(): [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.force_reraise() [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise self.value [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] updated_port = self._update_port( [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.919408] 
env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] _ensure_no_port_binding_failure(port) [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise exception.PortBindingFailed(port_id=port['id']) [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. [ 635.919408] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] [ 635.919692] env[62109]: INFO nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Terminating instance [ 635.920828] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.269843] env[62109]: DEBUG nova.scheduler.client.report [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.272510] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg ea79b17d2bee480a85a8ba9ee791d4a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.286376] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea79b17d2bee480a85a8ba9ee791d4a5 [ 636.374991] env[62109]: DEBUG nova.network.neutron [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.382508] env[62109]: DEBUG nova.network.neutron [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.383046] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 5b996126775046a0917a7856aa6194b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.392361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b996126775046a0917a7856aa6194b0 [ 636.720786] env[62109]: DEBUG nova.network.neutron [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.721380] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] Expecting reply to msg da7d805b1a7041a4956bf9900c8318ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.730505] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da7d805b1a7041a4956bf9900c8318ca [ 636.775610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.776279] env[62109]: ERROR nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. 
[ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Traceback (most recent call last): [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.driver.spawn(context, instance, image_meta, [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] vm_ref = self.build_virtual_machine(instance, [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] vif_infos = vmwarevif.get_vif_info(self._session, [ 636.776279] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] for vif in network_info: [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self._sync_wrapper(fn, *args, **kwargs) [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.wait() [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self[:] = self._gt.wait() [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self._exit_event.wait() [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] result = hub.switch() [ 636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
636.776602] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return self.greenlet.switch() [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] result = function(*args, **kwargs) [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] return func(*args, **kwargs) [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise e [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] nwinfo = self.network_api.allocate_for_instance( [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] created_port_ids = self._update_ports_for_instance( [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] with excutils.save_and_reraise_exception(): [ 636.776977] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] self.force_reraise() [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise self.value [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] updated_port = self._update_port( [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] _ensure_no_port_binding_failure(port) [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] raise exception.PortBindingFailed(port_id=port['id']) [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] nova.exception.PortBindingFailed: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. [ 636.777267] env[62109]: ERROR nova.compute.manager [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] [ 636.777525] env[62109]: DEBUG nova.compute.utils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.778200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.928s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.779657] env[62109]: INFO nova.compute.claims [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.781187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 097d43c26df648b4b8ed894de1ff035b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.782360] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Build of instance ce54ba3d-2cd5-4400-b334-8443ef73bbff was re-scheduled: Binding failed for port a786577c-7bcb-4451-83d7-073ce6e077ed, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 636.782782] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 636.783001] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquiring lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.783145] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Acquired lock "refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.783296] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.783650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg eb3fd10e0654471fa82c1cbf6c141c14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.795122] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb3fd10e0654471fa82c1cbf6c141c14 [ 636.838062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 097d43c26df648b4b8ed894de1ff035b [ 636.886247] env[62109]: INFO nova.compute.manager [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: f1239cdd-d1b3-4494-8204-0fe150737579] Took 1.02 seconds to deallocate network for instance. 
[ 636.887709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 60fd9fbb6b1b4531b93a3e0f8da1d7ac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 636.924037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60fd9fbb6b1b4531b93a3e0f8da1d7ac [ 637.224391] env[62109]: DEBUG oslo_concurrency.lockutils [req-adc2c4a9-f7f9-4176-8987-c5fccde0bc05 req-e9b7463b-e1f9-4394-baa4-14cfcd4ae820 service nova] Releasing lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.224814] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquired lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.225006] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.225454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg b38a6c1382754672a9d594b0b6a1e676 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.233676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b38a6c1382754672a9d594b0b6a1e676 [ 637.287792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg cd07f6bc59a241f69a3f41e26c504e7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.298075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd07f6bc59a241f69a3f41e26c504e7b [ 637.339045] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.392361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 94b86e89a56a411a910d0de42e39e290 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.437938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94b86e89a56a411a910d0de42e39e290 [ 637.620521] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.621060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg f806cf6c63b1465b90e5a105be980052 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.631752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f806cf6c63b1465b90e5a105be980052 [ 637.749510] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.916401] env[62109]: INFO nova.scheduler.client.report [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Deleted allocations for instance f1239cdd-d1b3-4494-8204-0fe150737579 [ 637.923956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 5ea8d91f7b8d41499fb819d1c5c93198 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.923956] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.923956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg f02ba5351241462d89e9e27a572bc4cf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 637.942662] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ea8d91f7b8d41499fb819d1c5c93198 [ 637.943341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02ba5351241462d89e9e27a572bc4cf [ 638.125016] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Releasing lock 
"refresh_cache-ce54ba3d-2cd5-4400-b334-8443ef73bbff" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.125308] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.125536] env[62109]: DEBUG nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.125683] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.152991] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.153616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 0ed830693aad403eaf3717fb1ff1a2ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.163860] env[62109]: DEBUG nova.compute.manager [req-2d45e6cd-54b3-43e7-adf0-e57ff6753af9 req-7512206a-0e33-48c4-bbcd-3bf6a395605c service nova] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Received event network-vif-deleted-a8f53cf7-372e-482c-94e0-6ed3a6e4442a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 638.164452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "87304cf6-e65f-41de-ab6f-d2170aaa9064" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.164659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "87304cf6-e65f-41de-ab6f-d2170aaa9064" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.168363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ed830693aad403eaf3717fb1ff1a2ca [ 638.308504] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3724fc7c-b362-4843-8e4f-9c61866f27a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.316732] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110c3c5d-c002-41b5-b9cc-f27ed9df717f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.367600] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee377ebb-3518-4fb4-87e4-bc59a65607dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.372466] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3960ef88-bc72-45dd-a8ae-f3afeef9f6d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.387003] env[62109]: DEBUG nova.compute.provider_tree [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.387523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg b0f3a0b7767a462d8011a368728d55b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.397589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0f3a0b7767a462d8011a368728d55b3 [ 638.426164] env[62109]: DEBUG oslo_concurrency.lockutils [None req-433c9b89-9ec3-4e6f-aa83-1de888b868a0 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "f1239cdd-d1b3-4494-8204-0fe150737579" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.247s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.426872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 8363a1107676429db4b36919924d02fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.428129] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Releasing lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.428538] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 638.428785] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 638.429584] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b67f6ca9-1990-4282-a92d-3f773b21dfbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.439829] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba49192-6c11-441f-8dcd-ce827106e3e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.452579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8363a1107676429db4b36919924d02fe [ 638.464588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "90c50f92-c1ff-4ac9-a819-ae0083884e28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.464827] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "90c50f92-c1ff-4ac9-a819-ae0083884e28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.465513] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932 could not be found. [ 638.465787] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 638.465906] env[62109]: INFO nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Took 0.04 seconds to destroy the instance on the hypervisor. [ 638.466141] env[62109]: DEBUG oslo.service.loopingcall [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.466416] env[62109]: DEBUG nova.compute.manager [-] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.466507] env[62109]: DEBUG nova.network.neutron [-] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.486856] env[62109]: DEBUG nova.network.neutron [-] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.487631] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 326e472aceed4cc5b0c8cdcaa375dd5a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.494752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 326e472aceed4cc5b0c8cdcaa375dd5a [ 638.656535] env[62109]: DEBUG nova.network.neutron [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.656535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg d624edaf9ef1439a86450bbbec1f8986 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.665395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d624edaf9ef1439a86450bbbec1f8986 [ 638.889785] env[62109]: DEBUG nova.scheduler.client.report [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 638.892324] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg b0d9314be90e49b783116e1d99394c88 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.908072] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d9314be90e49b783116e1d99394c88 [ 638.930069] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 638.931822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg f802d655412a45faae2e6e2e5d61bfc8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 638.963790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f802d655412a45faae2e6e2e5d61bfc8 [ 638.990514] env[62109]: DEBUG nova.network.neutron [-] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.990939] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 394654f0d3ce46489ce04fad231434e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.000728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 394654f0d3ce46489ce04fad231434e5 [ 639.161646] env[62109]: INFO nova.compute.manager [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] [instance: ce54ba3d-2cd5-4400-b334-8443ef73bbff] Took 1.03 seconds to deallocate network for instance. [ 639.161646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 4532580012664ef38e275664446ab3ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.213658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4532580012664ef38e275664446ab3ca [ 639.395536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.396026] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 639.397656] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg dff01e5c272b406c877da78e4fe7b6dd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.398651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.309s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.400721] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 859f00567adc4d158c8d487b506f1b53 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.439295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dff01e5c272b406c877da78e4fe7b6dd [ 639.447158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 859f00567adc4d158c8d487b506f1b53 [ 639.454585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.494501] env[62109]: INFO nova.compute.manager [-] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Took 1.03 seconds to deallocate network for instance. 
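The "Inventory has not changed" entries above log the full inventory payload the resource tracker reports for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. A rough sketch of what that payload means for scheduling, assuming the usual Placement capacity rule capacity = (total - reserved) * allocation_ratio (the formula itself is not shown in this log):

    # Inventory as logged above (min_unit/max_unit/step_size omitted here).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Effective capacity Placement would allow allocations against.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

Because the payload is a plain dict, the "has not changed" check is just a comparison against the locally cached ProviderTree data, which keeps these claim-time updates cheap.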
[ 639.497081] env[62109]: DEBUG nova.compute.claims [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 639.497081] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.665281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 4194c7df03a84c878c72424fc9eea96e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.700740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4194c7df03a84c878c72424fc9eea96e [ 639.903482] env[62109]: DEBUG nova.compute.utils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.904158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg ae23dbbed76442429dc2d369b7304ac9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 639.908319] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 639.908480] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 639.917532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae23dbbed76442429dc2d369b7304ac9 [ 640.022952] env[62109]: DEBUG nova.policy [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13438e8357694da8b1843f8d44270eac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1334c2de495a461fa55e937653441f68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 640.196976] env[62109]: INFO nova.scheduler.client.report [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Deleted allocations for instance ce54ba3d-2cd5-4400-b334-8443ef73bbff [ 640.206346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Expecting reply to msg 47ae046b5d084a4384cba49009705c55 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.225653] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47ae046b5d084a4384cba49009705c55 [ 640.350188] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012bb832-602a-48b1-b5d9-25c1987d66d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.357289] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d7cf5b-bfcc-4baf-ad7a-ab98b0ab2a7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.386963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1735f856-6a14-44bf-9756-0434f31e0aa7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.394614] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feaa92be-b89f-4258-b0b8-35df4cd26478 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.410680] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 640.412371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 8d1883af251b4237959003bc7d2e703a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.413521] env[62109]: DEBUG nova.compute.provider_tree [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.413963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 5b9929d0a9774a988edf4b3d0fd57128 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.423951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b9929d0a9774a988edf4b3d0fd57128 [ 640.460766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d1883af251b4237959003bc7d2e703a [ 640.464459] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Successfully created port: 137bb12e-27d8-4868-8174-3f7459cea1f9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.577487] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquiring lock "ae026dca-dc05-4710-8a03-4e792a0dc61d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.577714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "ae026dca-dc05-4710-8a03-4e792a0dc61d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.708050] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a4e1eb09-4885-4789-90e5-9152c14489d2 tempest-ServerPasswordTestJSON-1991055656 tempest-ServerPasswordTestJSON-1991055656-project-member] Lock "ce54ba3d-2cd5-4400-b334-8443ef73bbff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.948s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.711477] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg f0af7a3b1c8b4f9a8ace006b425fc01d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.722587] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0af7a3b1c8b4f9a8ace006b425fc01d [ 640.789688] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "16b04a1b-0ab3-4386-a1eb-74ef3e46a553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.789912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "16b04a1b-0ab3-4386-a1eb-74ef3e46a553" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.918527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg ea89f4709c7c49f19ef846ff45bc891a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.920131] env[62109]: DEBUG nova.scheduler.client.report [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.922534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 59206895f19442dbadce0a81a53b82ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 640.936890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59206895f19442dbadce0a81a53b82ad [ 640.952075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea89f4709c7c49f19ef846ff45bc891a [ 641.210924] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 641.212711] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 24ac077f0a774dd1b30edda11cb72c4c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 641.281658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24ac077f0a774dd1b30edda11cb72c4c [ 641.425185] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.026s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.426131] env[62109]: ERROR nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Traceback (most recent call last): [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.driver.spawn(context, instance, image_meta, [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] vm_ref = self.build_virtual_machine(instance, [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.426131] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] for vif in network_info: [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self._sync_wrapper(fn, *args, **kwargs) [ 641.426466] env[62109]: ERROR nova.compute.manager 
[instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.wait() [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self[:] = self._gt.wait() [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self._exit_event.wait() [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] result = hub.switch() [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 641.426466] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return self.greenlet.switch() [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] result = function(*args, **kwargs) [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] return func(*args, **kwargs) [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise e [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] nwinfo = self.network_api.allocate_for_instance( [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] created_port_ids = self._update_ports_for_instance( [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] with excutils.save_and_reraise_exception(): [ 641.426912] env[62109]: ERROR nova.compute.manager [instance: 
5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] self.force_reraise() [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise self.value [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] updated_port = self._update_port( [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] _ensure_no_port_binding_failure(port) [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] raise exception.PortBindingFailed(port_id=port['id']) [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] nova.exception.PortBindingFailed: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. [ 641.427217] env[62109]: ERROR nova.compute.manager [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] [ 641.427497] env[62109]: DEBUG nova.compute.utils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 641.428886] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 641.431165] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Build of instance 5f4a5c62-85f1-47ee-b702-1785bfe62f48 was re-scheduled: Binding failed for port cbcafabe-4af3-498d-b675-7d4cb156228f, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 641.431721] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 641.431835] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.431980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquired lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.432161] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 641.432572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 687cb38d49454021bc1418673c3cf7af in queue reply_7522b64acfeb4981b1f36928b040d568 [ 641.433336] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.721s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.436629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 2330cd899343419db6b9823dd38c68e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 641.440534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687cb38d49454021bc1418673c3cf7af [ 641.459523] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.459523] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.459523] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.459755] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.459755] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.459755] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.459755] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.459755] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 641.459943] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.459943] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
641.459943] env[62109]: DEBUG nova.virt.hardware [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.459943] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9071f6-d6e7-4b94-ab57-2e8298122435 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.468623] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c189ad-fbc2-44e5-a66b-e843814a6819 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.476471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2330cd899343419db6b9823dd38c68e6 [ 641.535272] env[62109]: DEBUG nova.compute.manager [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Received event network-changed-137bb12e-27d8-4868-8174-3f7459cea1f9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 641.535469] env[62109]: DEBUG nova.compute.manager [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Refreshing instance network info cache due to event network-changed-137bb12e-27d8-4868-8174-3f7459cea1f9. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 641.535681] env[62109]: DEBUG oslo_concurrency.lockutils [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] Acquiring lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.535820] env[62109]: DEBUG oslo_concurrency.lockutils [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] Acquired lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.535973] env[62109]: DEBUG nova.network.neutron [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Refreshing network info cache for port 137bb12e-27d8-4868-8174-3f7459cea1f9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 641.536420] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] Expecting reply to msg 6784a98731d34887807b894d6dedea95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 641.543923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6784a98731d34887807b894d6dedea95 [ 641.743763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.803413] env[62109]: ERROR nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. [ 641.803413] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.803413] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.803413] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.803413] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.803413] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.803413] env[62109]: ERROR nova.compute.manager raise self.value [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.803413] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 641.803413] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.803413] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 641.803901] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.803901] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 641.803901] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. 
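
Annotation (not part of the captured log): the traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which turns a failed Neutron port binding into the PortBindingFailed exception recorded here. The sketch below is a minimal, self-contained illustration of that check, assuming Neutron marks unbindable ports with a 'binding_failed' value in the port's binding:vif_type field; the exception class is deliberately simplified and is not Nova's real NovaException hierarchy.

# Illustrative sketch only -- simplified stand-in for the check whose frame
# appears in the traceback above; the 'binding:vif_type' == 'binding_failed'
# condition is an assumption about how Neutron reports failed bindings.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Neutron leaves a sentinel vif_type on ports it could not bind;
    # Nova raises instead of spawning a VM with an unusable NIC.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example shaped like the port behind this traceback:
port = {'id': '137bb12e-27d8-4868-8174-3f7459cea1f9',
        'binding:vif_type': 'binding_failed'}
_ensure_no_port_binding_failure(port)   # raises PortBindingFailed
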
[ 641.803901] env[62109]: ERROR nova.compute.manager [ 641.803901] env[62109]: Traceback (most recent call last): [ 641.803901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 641.803901] env[62109]: listener.cb(fileno) [ 641.803901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.803901] env[62109]: result = function(*args, **kwargs) [ 641.803901] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 641.803901] env[62109]: return func(*args, **kwargs) [ 641.803901] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.803901] env[62109]: raise e [ 641.803901] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.803901] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 641.803901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.803901] env[62109]: created_port_ids = self._update_ports_for_instance( [ 641.803901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.803901] env[62109]: with excutils.save_and_reraise_exception(): [ 641.803901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.803901] env[62109]: self.force_reraise() [ 641.803901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.803901] env[62109]: raise self.value [ 641.803901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.803901] env[62109]: updated_port = self._update_port( [ 641.803901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.803901] env[62109]: _ensure_no_port_binding_failure(port) [ 641.803901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.803901] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 641.804716] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. [ 641.804716] env[62109]: Removing descriptor: 19 [ 641.804716] env[62109]: ERROR nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. 
[ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Traceback (most recent call last): [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] yield resources [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.driver.spawn(context, instance, image_meta, [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.804716] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] vm_ref = self.build_virtual_machine(instance, [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] for vif in network_info: [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self._sync_wrapper(fn, *args, **kwargs) [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.wait() [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self[:] = self._gt.wait() [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self._exit_event.wait() [ 641.805045] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 641.805395] env[62109]: ERROR 
nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] result = hub.switch() [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self.greenlet.switch() [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] result = function(*args, **kwargs) [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return func(*args, **kwargs) [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise e [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] nwinfo = self.network_api.allocate_for_instance( [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.805395] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] created_port_ids = self._update_ports_for_instance( [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] with excutils.save_and_reraise_exception(): [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.force_reraise() [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise self.value [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] updated_port = self._update_port( [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.805744] 
env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] _ensure_no_port_binding_failure(port) [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.805744] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise exception.PortBindingFailed(port_id=port['id']) [ 641.806078] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. [ 641.806078] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] [ 641.806078] env[62109]: INFO nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Terminating instance [ 641.806523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquiring lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.992344] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.061342] env[62109]: DEBUG nova.network.neutron [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.215618] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.216186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 076edad64d1c4840baef551fd89a6e38 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 642.223496] env[62109]: DEBUG nova.network.neutron [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.223995] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] Expecting reply to msg 59b6230724a04ba997e2cd683da5938e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 642.228563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 076edad64d1c4840baef551fd89a6e38 [ 642.232871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59b6230724a04ba997e2cd683da5938e [ 642.452992] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837a1f7e-738b-4532-b1ba-a028d615676a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.460599] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf48e08f-735d-4a39-b366-60d72dd943be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.495058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a260d677-3ca5-4091-9969-0a3e9dd3b8f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.502580] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f15934-c4a5-4051-82ed-cf82e8863468 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.515644] env[62109]: DEBUG nova.compute.provider_tree [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.516177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg c578e513bc1e4acbb0b613de3b5b7f51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 642.522923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg c578e513bc1e4acbb0b613de3b5b7f51 [ 642.718543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Releasing lock "refresh_cache-5f4a5c62-85f1-47ee-b702-1785bfe62f48" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.718799] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 642.719201] env[62109]: DEBUG nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 642.719377] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 642.729886] env[62109]: DEBUG oslo_concurrency.lockutils [req-e6b12ba7-fbf8-46e3-ae3e-27354b7eb252 req-6a66cf60-78e3-4b25-8a1c-39c23d72f447 service nova] Releasing lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.730278] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquired lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.730456] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.730897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 655aecbe42894f768f5212e83e6be292 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 642.740849] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.741657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg ed26fd77810d47ccada20ba601d6ff8c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 642.749064] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 655aecbe42894f768f5212e83e6be292 [ 642.753876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed26fd77810d47ccada20ba601d6ff8c [ 643.018954] env[62109]: DEBUG nova.scheduler.client.report [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.022507] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg bb836bc9ea984e45b4d3bf08fefa25e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.033841] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb836bc9ea984e45b4d3bf08fefa25e5 [ 643.245639] env[62109]: DEBUG nova.network.neutron [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.246254] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 6c49dc1b88ec4a298b31db1ac47958ab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.257086] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c49dc1b88ec4a298b31db1ac47958ab [ 643.264850] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.435029] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.435592] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg d56e33fdd4d245aa985d6bdbd0c91983 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.445802] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d56e33fdd4d245aa985d6bdbd0c91983 [ 643.529567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.093s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.529567] env[62109]: ERROR nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. 
[ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Traceback (most recent call last): [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.driver.spawn(context, instance, image_meta, [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.529567] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] vm_ref = self.build_virtual_machine(instance, [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] for vif in network_info: [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self._sync_wrapper(fn, *args, **kwargs) [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.wait() [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self[:] = self._gt.wait() [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self._exit_event.wait() [ 643.530296] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] result = hub.switch() [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return self.greenlet.switch() [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] result = function(*args, **kwargs) [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] return func(*args, **kwargs) [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise e [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] nwinfo = self.network_api.allocate_for_instance( [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.531033] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] created_port_ids = self._update_ports_for_instance( [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] with excutils.save_and_reraise_exception(): [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] self.force_reraise() [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise self.value [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] updated_port = self._update_port( [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] _ensure_no_port_binding_failure(port) [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 643.531369] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] raise exception.PortBindingFailed(port_id=port['id']) [ 643.531726] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] nova.exception.PortBindingFailed: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. [ 643.531726] env[62109]: ERROR nova.compute.manager [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] [ 643.531726] env[62109]: DEBUG nova.compute.utils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 643.531726] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.175s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.531726] env[62109]: INFO nova.compute.claims [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.533102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 432b1335d3324dfd86ea87459dd91344 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.533729] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Build of instance 56f9bb28-2770-46aa-9d95-f60cdeae0967 was re-scheduled: Binding failed for port 1d761917-ebce-43d1-b26b-0773dc004f35, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 643.534181] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 643.534402] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquiring lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.534538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Acquired lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.534690] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.535066] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 0141ca85c3044da9a5f54a880bf76fd8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.542715] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0141ca85c3044da9a5f54a880bf76fd8 [ 643.596442] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 432b1335d3324dfd86ea87459dd91344 [ 643.632250] env[62109]: DEBUG nova.compute.manager [req-b828f58f-a57c-42c8-9808-838acf09da93 req-6a83bdc7-c76e-47fe-8d23-13bad883fd7c service nova] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Received event network-vif-deleted-137bb12e-27d8-4868-8174-3f7459cea1f9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 643.748787] env[62109]: INFO nova.compute.manager [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 5f4a5c62-85f1-47ee-b702-1785bfe62f48] Took 1.03 seconds to deallocate network for instance. 
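
Annotation (not part of the captured log): the "Inventory has not changed for provider ... based on inventory data" entries and the "Claim successful on node domain-c8..." records follow the Placement inventory model, where usable capacity per resource class is (total - reserved) * allocation_ratio. The helper below is an illustration using the inventory values reported in this log; the function name and structure are assumptions for illustration, not Placement or Nova code.

# Illustrative sketch only -- computes per-resource-class capacity from the
# inventory dict logged above, using capacity = (total - reserved) * ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Usable capacity per resource class, before subtracting current usage.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
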
[ 643.751624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 9ee9e83f26584f40909bd8384e65da77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 643.796181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ee9e83f26584f40909bd8384e65da77 [ 643.938263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Releasing lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.938699] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 643.938897] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 643.939234] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ab230c4-0b7e-4367-8218-76b14021dacd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.948273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1947564-550a-4516-938b-e4ea87e45e94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.970482] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 80a15f60-4843-4a59-a6c1-0d5624609672 could not be found. [ 643.970705] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 643.970878] env[62109]: INFO nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Took 0.03 seconds to destroy the instance on the hypervisor. [ 643.971122] env[62109]: DEBUG oslo.service.loopingcall [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.971335] env[62109]: DEBUG nova.compute.manager [-] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.971472] env[62109]: DEBUG nova.network.neutron [-] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.029082] env[62109]: DEBUG nova.network.neutron [-] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.029596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a495b8ad062743df840b1fc4fe378547 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.037937] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 596697993bff4a4ba98ef1e3a4f74021 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.044849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a495b8ad062743df840b1fc4fe378547 [ 644.053793] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596697993bff4a4ba98ef1e3a4f74021 [ 644.098445] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.258699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 081166ce8dbe4efe93afd7885b2ad164 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.290946] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.291526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 0930f0c36f944727981e3ac5f0734989 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.304073] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0930f0c36f944727981e3ac5f0734989 [ 644.307400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 081166ce8dbe4efe93afd7885b2ad164 [ 644.532697] env[62109]: DEBUG nova.network.neutron [-] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.533427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6154e06a745642d0bf08a1547cd28b75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.543485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6154e06a745642d0bf08a1547cd28b75 [ 644.568658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f672e1-7fe8-4d94-89b0-388ca7a6ab05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.576455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84526c5d-a190-4b04-a668-9473dcd409d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.606033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c763db-93c0-4240-a19f-66c3ae11c378 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.614255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2ae272-8d36-445b-9ce5-e3bf7d162a84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.629309] env[62109]: DEBUG nova.compute.provider_tree [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.629953] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 
tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 7b16b0bd799142338db0227a908cc81c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.636928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b16b0bd799142338db0227a908cc81c [ 644.790950] env[62109]: INFO nova.scheduler.client.report [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Deleted allocations for instance 5f4a5c62-85f1-47ee-b702-1785bfe62f48 [ 644.796319] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Releasing lock "refresh_cache-56f9bb28-2770-46aa-9d95-f60cdeae0967" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.800109] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 644.800109] env[62109]: DEBUG nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.800109] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.800109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 208d5aff504c46d6b1189e1139ef02d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.818101] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 208d5aff504c46d6b1189e1139ef02d4 [ 644.824012] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.824660] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg f25e4774ddf147cc9519bde1dea5a9b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 644.836911] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f25e4774ddf147cc9519bde1dea5a9b3 [ 645.035783] env[62109]: INFO nova.compute.manager [-] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Took 1.06 seconds to deallocate network for instance. [ 645.038599] env[62109]: DEBUG nova.compute.claims [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 645.038599] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.136278] env[62109]: DEBUG nova.scheduler.client.report [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 645.136278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg aa9c6cd3674246f485d68604181a8500 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.147806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa9c6cd3674246f485d68604181a8500 [ 645.309030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ae7f93d7-5a72-435a-8e7c-94046f28c0bf tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "5f4a5c62-85f1-47ee-b702-1785bfe62f48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.498s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.309683] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg bc569f5b73c545b29e59eaa41fa40d95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.328020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg bc569f5b73c545b29e59eaa41fa40d95 [ 645.328020] env[62109]: DEBUG nova.network.neutron [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.328020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 225a1571e4d94ab495ea3ad0f66de3df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.336564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 225a1571e4d94ab495ea3ad0f66de3df [ 645.637977] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.638781] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 645.642204] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg e889925a41e54b078a55e5b5366a85cf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.642204] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.536s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.643135] env[62109]: INFO nova.compute.claims [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.644474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg b4a070c28a384c2f91184ab9c4c66e89 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.681765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e889925a41e54b078a55e5b5366a85cf [ 645.691889] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4a070c28a384c2f91184ab9c4c66e89 [ 645.814449] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 
tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 645.814449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg a46fd45893334cf79e847709f03be058 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.836696] env[62109]: INFO nova.compute.manager [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] [instance: 56f9bb28-2770-46aa-9d95-f60cdeae0967] Took 1.04 seconds to deallocate network for instance. [ 645.838378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg b34f14d9842a4f3ea594364f6cfd54d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 645.852760] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a46fd45893334cf79e847709f03be058 [ 645.896457] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b34f14d9842a4f3ea594364f6cfd54d0 [ 645.938639] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "08638aac-2c6c-4580-9894-6b3b3c1ec484" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.938808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "08638aac-2c6c-4580-9894-6b3b3c1ec484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.147390] env[62109]: DEBUG nova.compute.utils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.148065] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg c9b0137f7f1545458f1eb539f6802596 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 646.148989] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 646.149148] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.152516] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 47e7d504003c420c93181b9aadf47613 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 646.161294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47e7d504003c420c93181b9aadf47613 [ 646.162776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9b0137f7f1545458f1eb539f6802596 [ 646.247551] env[62109]: DEBUG nova.policy [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bc490e03afb422a86e04901d36d6945', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c19034722a8742e7b62e6107e9028a9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 646.334774] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.342418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg d40c44cb1a894461bb91558aa297be08 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 646.383172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d40c44cb1a894461bb91558aa297be08 [ 646.654666] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Start building block device mappings for instance. 
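
The "Policy check for network:attach_external_network failed" entry above is expected for these tempest credentials: the caller only holds the member and reader roles, and that policy defaults to admin-only, so Nova simply does not attach external networks rather than failing the build. A minimal sketch of that kind of role-based check with oslo.policy follows; the rule string and the registered default are illustrative, not copied from Nova's policy definitions.

    from oslo_config import cfg
    from oslo_policy import policy

    # Illustrative stand-in for an admin-only default on this policy name.
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    # Credentials shaped like the ones in the log entry: member/reader only.
    creds = {'roles': ['member', 'reader'],
             'user_id': '1bc490e03afb422a86e04901d36d6945',
             'project_id': 'c19034722a8742e7b62e6107e9028a9c'}

    # Evaluates to False, which the DEBUG line above reports as "failed".
    print(enforcer.enforce('network:attach_external_network', {}, creds))
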
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 646.656418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 7a8659a22f884910a4745fb54d54f749 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 646.699858] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a8659a22f884910a4745fb54d54f749 [ 646.706424] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Successfully created port: 530e4dd8-07d2-480d-9d84-700373a8d44a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.868281] env[62109]: INFO nova.scheduler.client.report [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Deleted allocations for instance 56f9bb28-2770-46aa-9d95-f60cdeae0967 [ 646.877005] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Expecting reply to msg 9b84902cf34c46b58e8dd2fffdac1ac7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 646.894279] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b84902cf34c46b58e8dd2fffdac1ac7 [ 647.134815] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0814641-e9f2-4ce9-b374-b4047546878c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.136916] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e32d799-536f-4623-9d8d-358856c84ad1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.176020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 395130f9778944f989ac7ca7f18e71d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 647.188850] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07cda26-b1e1-4520-9515-3dcbdadcd6e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.202798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd537101-bebe-419b-9924-c0f62addec91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.217248] env[62109]: DEBUG nova.compute.provider_tree [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.217874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 91a6fc7dc21549ce97b9399a4056e138 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 647.219541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 395130f9778944f989ac7ca7f18e71d7 [ 647.224749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91a6fc7dc21549ce97b9399a4056e138 [ 647.381410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-26b49dcd-b372-4629-ab78-5c002c219e38 tempest-VolumesAdminNegativeTest-459345843 tempest-VolumesAdminNegativeTest-459345843-project-member] Lock "56f9bb28-2770-46aa-9d95-f60cdeae0967" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.231s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.382075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 4dcdf2f3278147b5b7243530304874f1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 647.393088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dcdf2f3278147b5b7243530304874f1 [ 647.690057] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 647.716326] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.716592] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.716778] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 647.717004] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.717592] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.717789] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.718086] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.718234] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.718395] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.718547] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.718715] env[62109]: DEBUG nova.virt.hardware [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.719567] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efa5994-28f8-4660-a536-b1e2ba798c34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.722726] env[62109]: DEBUG nova.scheduler.client.report [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.725146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 7cab1dcf68c94e37a3ec9c7926d55527 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 647.731444] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c621a20-9246-4bd9-ae4d-afeb474fab4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.748209] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cab1dcf68c94e37a3ec9c7926d55527 [ 647.884721] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 647.886558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 0cb4c8a3fa88446e8922f630cd4b5b8b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 647.919511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb4c8a3fa88446e8922f630cd4b5b8b [ 648.044201] env[62109]: DEBUG nova.compute.manager [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Received event network-changed-530e4dd8-07d2-480d-9d84-700373a8d44a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 648.044397] env[62109]: DEBUG nova.compute.manager [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Refreshing instance network info cache due to event network-changed-530e4dd8-07d2-480d-9d84-700373a8d44a. 
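
The inventory payload logged above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e maps to the capacity Placement exposes to the scheduler through the usual calculation (total - reserved) * allocation_ratio, while max_unit (16 VCPU, 65530 MB, 124 GB here) caps what any single allocation may take. A quick worked check with the logged numbers:

    # Worked example using the inventory data from the log entry above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0  (48 cores oversubscribed 4x)
    # MEMORY_MB 196078.0
    # DISK_GB 400.0
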
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 648.044609] env[62109]: DEBUG oslo_concurrency.lockutils [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] Acquiring lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.044747] env[62109]: DEBUG oslo_concurrency.lockutils [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] Acquired lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.044902] env[62109]: DEBUG nova.network.neutron [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Refreshing network info cache for port 530e4dd8-07d2-480d-9d84-700373a8d44a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 648.045315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] Expecting reply to msg a549fcef79414d63a43986cd0655bb71 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 648.051933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a549fcef79414d63a43986cd0655bb71 [ 648.110648] env[62109]: ERROR nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. 
[ 648.110648] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.110648] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.110648] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.110648] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.110648] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.110648] env[62109]: ERROR nova.compute.manager raise self.value [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.110648] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 648.110648] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.110648] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 648.111092] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.111092] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 648.111092] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. 
[ 648.111092] env[62109]: ERROR nova.compute.manager [ 648.111092] env[62109]: Traceback (most recent call last): [ 648.111092] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 648.111092] env[62109]: listener.cb(fileno) [ 648.111092] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.111092] env[62109]: result = function(*args, **kwargs) [ 648.111092] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.111092] env[62109]: return func(*args, **kwargs) [ 648.111092] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.111092] env[62109]: raise e [ 648.111092] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.111092] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 648.111092] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.111092] env[62109]: created_port_ids = self._update_ports_for_instance( [ 648.111092] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.111092] env[62109]: with excutils.save_and_reraise_exception(): [ 648.111092] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.111092] env[62109]: self.force_reraise() [ 648.111092] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.111092] env[62109]: raise self.value [ 648.111092] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.111092] env[62109]: updated_port = self._update_port( [ 648.111092] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.111092] env[62109]: _ensure_no_port_binding_failure(port) [ 648.111092] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.111092] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 648.111812] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. [ 648.111812] env[62109]: Removing descriptor: 16 [ 648.111812] env[62109]: ERROR nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. 
[ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] Traceback (most recent call last): [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] yield resources [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.driver.spawn(context, instance, image_meta, [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.111812] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] vm_ref = self.build_virtual_machine(instance, [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] for vif in network_info: [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self._sync_wrapper(fn, *args, **kwargs) [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.wait() [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self[:] = self._gt.wait() [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self._exit_event.wait() [ 648.112137] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 648.112470] env[62109]: ERROR 
nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] result = hub.switch() [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self.greenlet.switch() [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] result = function(*args, **kwargs) [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return func(*args, **kwargs) [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise e [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] nwinfo = self.network_api.allocate_for_instance( [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.112470] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] created_port_ids = self._update_ports_for_instance( [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] with excutils.save_and_reraise_exception(): [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.force_reraise() [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise self.value [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] updated_port = self._update_port( [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.112796] 
env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] _ensure_no_port_binding_failure(port) [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.112796] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise exception.PortBindingFailed(port_id=port['id']) [ 648.113209] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. [ 648.113209] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] [ 648.113209] env[62109]: INFO nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Terminating instance [ 648.114365] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquiring lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.227909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.228566] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Start building networks asynchronously for instance. 
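
The traceback above bottoms out in _ensure_no_port_binding_failure() in nova/network/neutron.py (line 294 in this run): after Nova updates the port, Neutron reports the binding as failed and Nova converts that into PortBindingFailed, which is what kills the spawn and leads to "Terminating instance". A condensed sketch of that check, reconstructed from the traceback rather than copied from the Nova source, so details may differ:

    # Reconstructed from the traceback; the real code lives in
    # nova/network/neutron.py and nova/exception.py.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type Neutron sets on a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns for the updated port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Mirrors port 530e4dd8-07d2-480d-9d84-700373a8d44a from the log:
    try:
        _ensure_no_port_binding_failure(
            {'id': '530e4dd8-07d2-480d-9d84-700373a8d44a',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message that appears throughout this log
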
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 648.230371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 7f4603064f3a4d7cbe7e13a80832acfc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 648.231483] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.455s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.233675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 357553b13b2449b59f8aa8917c8286a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 648.266795] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f4603064f3a4d7cbe7e13a80832acfc [ 648.268183] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 357553b13b2449b59f8aa8917c8286a9 [ 648.406745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.570572] env[62109]: DEBUG nova.network.neutron [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.683839] env[62109]: DEBUG nova.network.neutron [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.684595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] Expecting reply to msg 805924b4a41c4e62b55d0899fd7d5e4e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 648.692790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 805924b4a41c4e62b55d0899fd7d5e4e [ 648.739824] env[62109]: DEBUG nova.compute.utils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.740938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 673dc7a31f6a42f596a235f9d1ad3995 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 648.742573] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 648.743642] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.752239] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 673dc7a31f6a42f596a235f9d1ad3995 [ 648.870086] env[62109]: DEBUG nova.policy [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bedc426b7fca4be7bb9c9d2edd41bc83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0ff66f09fe24c15a6498a601ccb43d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 649.111454] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89923b99-abbd-4b1f-8b0d-13390b6bfcf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.119708] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67783ee3-198f-4746-bf3e-625af198e8da {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.157752] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d70e932-e6c8-457d-bb28-61bdb1a76a6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.165868] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e632efa2-f3de-405e-ab31-3e8a431e6f6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.181719] env[62109]: DEBUG nova.compute.provider_tree [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.182390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 345f609c90b74b0fbea20d197b53e92a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.187311] env[62109]: DEBUG oslo_concurrency.lockutils [req-9cccfa82-4233-4f2d-8269-9b0d2a8139f9 req-aaf41b11-c69c-4c43-aa48-fe2b62a0123e service nova] Releasing lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.187772] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquired lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.188145] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.188848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 09f5bf80b0394da8819fdda0672daa39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.195316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 345f609c90b74b0fbea20d197b53e92a [ 649.195790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09f5bf80b0394da8819fdda0672daa39 [ 649.246218] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 649.247999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 043568faeb894b21890e40478eeaebcb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.281120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 043568faeb894b21890e40478eeaebcb [ 649.572938] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Successfully created port: 59e1287a-ba70-419b-a589-4ff8cd85c074 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.685029] env[62109]: DEBUG nova.scheduler.client.report [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.687675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 039a3eab6a2b4830bb74e7d30aafca55 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.699890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 039a3eab6a2b4830bb74e7d30aafca55 [ 649.716488] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.752218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg dbefe340b33145a29e95a6e6575fe1f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.782230] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.782752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg e43bf09e610642eab8a95d47a18d2c21 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 649.786462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbefe340b33145a29e95a6e6575fe1f5 [ 649.790978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e43bf09e610642eab8a95d47a18d2c21 [ 650.178005] env[62109]: DEBUG nova.compute.manager [req-8b6ccb51-57fb-4593-90a0-a94e7ed3bbe4 req-646eedd6-84df-4f74-9c3f-a9cdb37e9381 service nova] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Received event network-vif-deleted-530e4dd8-07d2-480d-9d84-700373a8d44a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 650.190135] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.191031] env[62109]: ERROR nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. 
[ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Traceback (most recent call last): [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.driver.spawn(context, instance, image_meta, [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] vm_ref = self.build_virtual_machine(instance, [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.191031] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] for vif in network_info: [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self._sync_wrapper(fn, *args, **kwargs) [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.wait() [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self[:] = self._gt.wait() [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self._exit_event.wait() [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] result = hub.switch() [ 650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
650.191497] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return self.greenlet.switch() [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] result = function(*args, **kwargs) [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] return func(*args, **kwargs) [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise e [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] nwinfo = self.network_api.allocate_for_instance( [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] created_port_ids = self._update_ports_for_instance( [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] with excutils.save_and_reraise_exception(): [ 650.191994] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] self.force_reraise() [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise self.value [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] updated_port = self._update_port( [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] _ensure_no_port_binding_failure(port) [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] raise exception.PortBindingFailed(port_id=port['id']) [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] nova.exception.PortBindingFailed: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. [ 650.192500] env[62109]: ERROR nova.compute.manager [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] [ 650.192932] env[62109]: DEBUG nova.compute.utils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 650.192932] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.399s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.194644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg aa33ac686c0c4fd69764e5ab080e017a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.197041] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Build of instance 35411b03-ace3-40da-8c3e-3872ac003bd3 was re-scheduled: Binding failed for port 4757bd66-fea5-408a-b5a7-7b719b899f50, please check neutron logs for more information. 
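
The recurring 'Acquiring lock "compute_resources" ... / acquired ... waited 15.399s / "released" ... held N.NNNs' entries come from oslo.concurrency's lock wrapper: the resource tracker serializes its claim and abort paths on a named in-process lock, and the wrapper logs how long each caller waited for the lock and then held it. A minimal sketch of that pattern; the function name and body are illustrative, not Nova's resource tracker code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Only one caller at a time gets past the decorator for this lock
        # name; blocked time shows up in the log as "waited N.NNNs" and the
        # critical section as "held N.NNNs".
        return 'claimed resources for %s' % instance_uuid

    print(instance_claim('2fa640c2-b433-4581-be4b-0673c1451043'))
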
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 650.197041] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 650.197041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquiring lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.197041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Acquired lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.197335] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.198074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 626dc6ca176149eab37f486816f28f1f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.205031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 626dc6ca176149eab37f486816f28f1f [ 650.234712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa33ac686c0c4fd69764e5ab080e017a [ 650.255191] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 650.284016] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.284276] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.284434] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.284612] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.284770] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.284919] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.285191] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.285357] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.285526] env[62109]: DEBUG 
nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.285696] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.285874] env[62109]: DEBUG nova.virt.hardware [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.286383] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Releasing lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.286768] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 650.286968] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 650.287782] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73b7892-f83e-495d-a583-c695ba7ded54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.290433] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb889e4a-4c87-4c9e-8010-9d3214212765 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.298303] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b970ba-2ba9-44f2-94e6-c54d4b7addbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.304828] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff955e0-4370-4fff-8bad-6642f93b73c1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.331798] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
15e2e743-070f-4545-b976-ced38fd99198 could not be found. [ 650.332063] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 650.332264] env[62109]: INFO nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Took 0.05 seconds to destroy the instance on the hypervisor. [ 650.332517] env[62109]: DEBUG oslo.service.loopingcall [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.332720] env[62109]: DEBUG nova.compute.manager [-] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.332813] env[62109]: DEBUG nova.network.neutron [-] [instance: 15e2e743-070f-4545-b976-ced38fd99198] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.358277] env[62109]: DEBUG nova.network.neutron [-] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.358798] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 36abed20edcc4eb6b00b00d094261364 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.365518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36abed20edcc4eb6b00b00d094261364 [ 650.662450] env[62109]: ERROR nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. 
[ 650.662450] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.662450] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.662450] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.662450] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.662450] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.662450] env[62109]: ERROR nova.compute.manager raise self.value [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.662450] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 650.662450] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.662450] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 650.663077] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.663077] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 650.663077] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. 
[ 650.663077] env[62109]: ERROR nova.compute.manager [ 650.663077] env[62109]: Traceback (most recent call last): [ 650.663077] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 650.663077] env[62109]: listener.cb(fileno) [ 650.663077] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.663077] env[62109]: result = function(*args, **kwargs) [ 650.663077] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.663077] env[62109]: return func(*args, **kwargs) [ 650.663077] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.663077] env[62109]: raise e [ 650.663077] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.663077] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 650.663077] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.663077] env[62109]: created_port_ids = self._update_ports_for_instance( [ 650.663077] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.663077] env[62109]: with excutils.save_and_reraise_exception(): [ 650.663077] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.663077] env[62109]: self.force_reraise() [ 650.663077] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.663077] env[62109]: raise self.value [ 650.663077] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.663077] env[62109]: updated_port = self._update_port( [ 650.663077] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.663077] env[62109]: _ensure_no_port_binding_failure(port) [ 650.663077] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.663077] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 650.664385] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. [ 650.664385] env[62109]: Removing descriptor: 16 [ 650.664385] env[62109]: ERROR nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. 
[ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Traceback (most recent call last): [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] yield resources [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.driver.spawn(context, instance, image_meta, [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.664385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] vm_ref = self.build_virtual_machine(instance, [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] for vif in network_info: [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self._sync_wrapper(fn, *args, **kwargs) [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.wait() [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self[:] = self._gt.wait() [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self._exit_event.wait() [ 650.664879] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.665387] env[62109]: ERROR 
nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] result = hub.switch() [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self.greenlet.switch() [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] result = function(*args, **kwargs) [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return func(*args, **kwargs) [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise e [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] nwinfo = self.network_api.allocate_for_instance( [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.665387] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] created_port_ids = self._update_ports_for_instance( [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] with excutils.save_and_reraise_exception(): [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.force_reraise() [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise self.value [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] updated_port = self._update_port( [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.665906] 
env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] _ensure_no_port_binding_failure(port) [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.665906] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise exception.PortBindingFailed(port_id=port['id']) [ 650.666385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. [ 650.666385] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] [ 650.666385] env[62109]: INFO nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Terminating instance [ 650.666385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquiring lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.666385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquired lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.666385] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.666694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg b1c9fcb42eba4415b68da54873c0d63a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.677876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c9fcb42eba4415b68da54873c0d63a [ 650.709948] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "66a0a424-ecb6-43df-9b47-946ff1e1b7b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.710217] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "66a0a424-ecb6-43df-9b47-946ff1e1b7b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.719206] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.860813] env[62109]: DEBUG nova.network.neutron [-] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.861607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6f4b0d109e0d4b74a07464c569be22a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.882697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f4b0d109e0d4b74a07464c569be22a4 [ 650.987794] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.988405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 87bdddf01d3540ab986cfc93e8efb3e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 650.998818] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87bdddf01d3540ab986cfc93e8efb3e0 [ 651.161599] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb5255e-6cbf-428b-bd0e-e05fe6cce641 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.177020] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed2ca13-35ba-42e4-8685-24ecc9180b8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.210998] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.213496] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1c03a3-4bc3-4ac9-b7a8-34227e9d10a1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.221355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9c8578-444a-4d73-9753-25f249fc4cbe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.235775] env[62109]: DEBUG nova.compute.provider_tree [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.236351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 4ca2d1f2eb8a4ea8b62a2c0e91407244 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 651.244980] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ca2d1f2eb8a4ea8b62a2c0e91407244 [ 651.328620] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.329154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg de024ae91851458fa2957e8db8721107 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 651.337905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de024ae91851458fa2957e8db8721107 [ 651.373656] env[62109]: INFO nova.compute.manager [-] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Took 1.04 seconds to deallocate network for instance. 
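Both failed builds traced above (ports 4757bd66-fea5-408a-b5a7-7b719b899f50 and 59e1287a-ba70-419b-a589-4ff8cd85c074) die on the same path: allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure, which raises PortBindingFailed at nova/network/neutron.py:294. The short sketch below only illustrates the shape of that final check for readers following the tracebacks; it is a simplified stand-in, not the nova source, and the 'binding_failed' vif_type value, the constant name, and the exception signature are assumptions inferred from the log messages.

    # Simplified, self-contained sketch of the kind of check that produces the
    # PortBindingFailed errors in the tracebacks above (assumed field/constant
    # names; not copied from nova).

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel set by Neutron


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that it could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # Hypothetical port dict as it might come back from the Neutron API.
        failed_port = {'id': '59e1287a-ba70-419b-a589-4ff8cd85c074',
                       'binding:vif_type': VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the message repeated throughout this log

A binding typically ends up in this state when Neutron could not bind the port to the compute host at all, which is why every one of these errors defers to the neutron logs for the root cause.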
[ 651.375905] env[62109]: DEBUG nova.compute.claims [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 651.376113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.491039] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Releasing lock "refresh_cache-35411b03-ace3-40da-8c3e-3872ac003bd3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.491290] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 651.491473] env[62109]: DEBUG nova.compute.manager [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.491639] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 651.507760] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.508814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 1bdc7f63c0114dd8be34050ab678d424 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 651.516953] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bdc7f63c0114dd8be34050ab678d424 [ 651.743341] env[62109]: DEBUG nova.scheduler.client.report [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.743341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg cf115520ee934d4a83efc7469fc92f29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 651.758643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf115520ee934d4a83efc7469fc92f29 [ 651.840661] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Releasing lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.840661] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 651.840661] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 651.840661] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e5e6956-70e4-40f8-a685-6422acb48822 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.848899] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bdcd85-045d-4a57-869b-8cf7cf040b61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.870373] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2fa640c2-b433-4581-be4b-0673c1451043 could not be found. [ 651.870735] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 651.871050] env[62109]: INFO nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Took 0.03 seconds to destroy the instance on the hypervisor. [ 651.871400] env[62109]: DEBUG oslo.service.loopingcall [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.871737] env[62109]: DEBUG nova.compute.manager [-] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.871981] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 651.899045] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.899905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c9e0ef2f1e00418ba4dc40e11bf525e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 651.907025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9e0ef2f1e00418ba4dc40e11bf525e1 [ 652.012048] env[62109]: DEBUG nova.network.neutron [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.012048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 1a90ae84bccb41c2879579c3fb72be68 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.019359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a90ae84bccb41c2879579c3fb72be68 [ 652.207556] env[62109]: DEBUG nova.compute.manager [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Received event network-changed-59e1287a-ba70-419b-a589-4ff8cd85c074 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 652.214806] env[62109]: DEBUG nova.compute.manager [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Refreshing instance network info cache due to event network-changed-59e1287a-ba70-419b-a589-4ff8cd85c074. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 652.214806] env[62109]: DEBUG oslo_concurrency.lockutils [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] Acquiring lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.214806] env[62109]: DEBUG oslo_concurrency.lockutils [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] Acquired lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.214806] env[62109]: DEBUG nova.network.neutron [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Refreshing network info cache for port 59e1287a-ba70-419b-a589-4ff8cd85c074 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 652.214806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] Expecting reply to msg 9f61d4ab1ae4491097a3e0c7b917fd85 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.216305] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f61d4ab1ae4491097a3e0c7b917fd85 [ 652.239212] env[62109]: DEBUG nova.network.neutron [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.244336] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.250939] env[62109]: ERROR nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. 
[ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Traceback (most recent call last): [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.driver.spawn(context, instance, image_meta, [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] vm_ref = self.build_virtual_machine(instance, [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.250939] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] for vif in network_info: [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self._sync_wrapper(fn, *args, **kwargs) [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.wait() [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self[:] = self._gt.wait() [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self._exit_event.wait() [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] result = hub.switch() [ 652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
652.251280] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return self.greenlet.switch() [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] result = function(*args, **kwargs) [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] return func(*args, **kwargs) [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise e [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] nwinfo = self.network_api.allocate_for_instance( [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] created_port_ids = self._update_ports_for_instance( [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] with excutils.save_and_reraise_exception(): [ 652.251612] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] self.force_reraise() [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise self.value [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] updated_port = self._update_port( [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] _ensure_no_port_binding_failure(port) [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] raise exception.PortBindingFailed(port_id=port['id']) [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] nova.exception.PortBindingFailed: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. [ 652.251994] env[62109]: ERROR nova.compute.manager [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] [ 652.253200] env[62109]: DEBUG nova.compute.utils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 652.258067] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.429s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.259373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 6992f86b9b82445782f0be7fe813bd07 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.260975] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Build of instance 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee was re-scheduled: Binding failed for port 0a638f3f-d212-4e30-8d09-d277b8a79f1f, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 652.261572] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 652.261899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquiring lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.262152] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Acquired lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.262478] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 652.263056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 1aa1fa344ec841abb94cb56eebc75aa8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.269418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aa1fa344ec841abb94cb56eebc75aa8 [ 652.286987] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6992f86b9b82445782f0be7fe813bd07 [ 652.408119] env[62109]: DEBUG nova.network.neutron [-] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.408597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3fc02b20096c4a2389a7d0cd64812c1e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.421390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fc02b20096c4a2389a7d0cd64812c1e [ 652.433499] env[62109]: DEBUG nova.network.neutron [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.434008] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] Expecting reply to msg 4e0a104aac244cba9805ed9f0de9c617 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.445060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e0a104aac244cba9805ed9f0de9c617 [ 652.514272] env[62109]: INFO nova.compute.manager [None 
req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] [instance: 35411b03-ace3-40da-8c3e-3872ac003bd3] Took 1.02 seconds to deallocate network for instance. [ 652.516247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 6fdd8a919c354f33ae014dd062ed5135 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.552900] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fdd8a919c354f33ae014dd062ed5135 [ 652.766594] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 4dbbfee74e604694b2579a2f43ced251 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 652.786158] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.797603] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dbbfee74e604694b2579a2f43ced251 [ 652.913234] env[62109]: INFO nova.compute.manager [-] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Took 1.04 seconds to deallocate network for instance. [ 652.914226] env[62109]: DEBUG nova.compute.claims [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 652.914226] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.936744] env[62109]: DEBUG oslo_concurrency.lockutils [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] Releasing lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.936744] env[62109]: DEBUG nova.compute.manager [req-2c9cf0e7-43ed-4d05-8a80-ed6f01ab3372 req-0ba71ded-6fca-4158-9210-ed96771ae08b service nova] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Received event network-vif-deleted-59e1287a-ba70-419b-a589-4ff8cd85c074 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 653.021885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg c258003166d94eb9b3da7641ecdbfed6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.059121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c258003166d94eb9b3da7641ecdbfed6 [ 653.081172] env[62109]: DEBUG nova.network.neutron [None 
req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.081172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 542b48f29f5c48dc86b7eaed7b3cb175 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.089874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 542b48f29f5c48dc86b7eaed7b3cb175 [ 653.293102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 0b3c9f51c4964437b359fbc0e3e1ef23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.303648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b3c9f51c4964437b359fbc0e3e1ef23 [ 653.543832] env[62109]: INFO nova.scheduler.client.report [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Deleted allocations for instance 35411b03-ace3-40da-8c3e-3872ac003bd3 [ 653.549909] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Expecting reply to msg 898cbae1b68e4c599c08bc7a20347589 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.562808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898cbae1b68e4c599c08bc7a20347589 [ 653.583489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Releasing lock "refresh_cache-3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.583879] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 653.584143] env[62109]: DEBUG nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 653.584370] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 653.606590] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 653.607248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 0ac7913bf6354786800d026234b851bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.614647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ac7913bf6354786800d026234b851bb [ 653.796150] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 35411b03-ace3-40da-8c3e-3872ac003bd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.796453] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.796810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg b3572e19a5f1431696cd812223c3c42c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 653.809082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3572e19a5f1431696cd812223c3c42c [ 654.052502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-28341f39-563b-434f-9742-0ac8f24e40ab tempest-ServersTestManualDisk-2065425642 tempest-ServersTestManualDisk-2065425642-project-member] Lock "35411b03-ace3-40da-8c3e-3872ac003bd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.145s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.053165] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 1dafc0bf011f4cd18a95da8b34f97055 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.062326] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dafc0bf011f4cd18a95da8b34f97055 [ 654.109412] env[62109]: DEBUG nova.network.neutron [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.109941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 1b3b2e9e557343d2a42be0b0d174ef86 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.119334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b3b2e9e557343d2a42be0b0d174ef86 [ 654.299759] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.299759] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 80a15f60-4843-4a59-a6c1-0d5624609672 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 654.299759] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 15e2e743-070f-4545-b976-ced38fd99198 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 654.299759] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 2fa640c2-b433-4581-be4b-0673c1451043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 654.300377] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 84cb35c149da47f1a6ced7e106d57de6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.324325] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84cb35c149da47f1a6ced7e106d57de6 [ 654.555310] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 654.557296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 7a509683bf2a4ddd82c17447005d1d37 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.597201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a509683bf2a4ddd82c17447005d1d37 [ 654.612416] env[62109]: INFO nova.compute.manager [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] [instance: 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee] Took 1.03 seconds to deallocate network for instance. [ 654.614131] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg cecbf0598aaf4a458aed9c2d1a294c22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.647308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cecbf0598aaf4a458aed9c2d1a294c22 [ 654.804220] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 535045d9-108e-4e88-82f0-9da98f2f55a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.804785] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 28b604a91d2646f9836be8f98669f86b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 654.814437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28b604a91d2646f9836be8f98669f86b [ 655.078717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.118625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 04165fa563e0424f8d532b25722587e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 655.157765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04165fa563e0424f8d532b25722587e2 [ 655.307071] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 83a25ff9-cc7d-4917-95cc-e621884bcee8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.307711] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 008165ab8856403ba45c0ceb696f4fee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 655.317845] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 008165ab8856403ba45c0ceb696f4fee [ 655.651868] env[62109]: INFO nova.scheduler.client.report [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Deleted allocations for instance 3454f8a4-a9c7-4622-9cdc-36f683b6f3ee [ 655.658762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Expecting reply to msg 1cf315b0b70645d08386eaaf44261bbe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 655.678151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cf315b0b70645d08386eaaf44261bbe [ 655.810311] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance b537150e-9136-4fa4-b092-4f4995b918b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.810915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1d28670cdfa140be8e9e808ac88acf32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 655.822217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d28670cdfa140be8e9e808ac88acf32 [ 656.534766] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d2fd4c59-8101-4bad-b7a3-afdc2994a4b5 tempest-ImagesOneServerTestJSON-1598717001 tempest-ImagesOneServerTestJSON-1598717001-project-member] Lock "3454f8a4-a9c7-4622-9cdc-36f683b6f3ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.730s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.535429] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance fe9756ba-0eb5-41ad-913f-e933f97542cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.535933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg de5c9c0abc2a4aa19a9b411941d363ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 656.537212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 52af543b65204c2f8c6434ba63658fff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 656.552283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de5c9c0abc2a4aa19a9b411941d363ad [ 656.552776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52af543b65204c2f8c6434ba63658fff [ 657.039727] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 11a6eaa1-0d35-49cf-9341-b74129cf087b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.040313] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8cb734cc40404678bf3e4bf7b64ce520 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 657.041262] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 657.042988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 0a160bbb12df4dca9faaff0ada77636b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 657.051101] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cb734cc40404678bf3e4bf7b64ce520 [ 657.078915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a160bbb12df4dca9faaff0ada77636b [ 657.430439] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "e8c77459-e3a3-4a68-9f76-0757dd0f2587" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.430831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "e8c77459-e3a3-4a68-9f76-0757dd0f2587" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.545256] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3a4e1dcc-610f-4037-94e9-c9815c12ed1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.545990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8982c4589b924c1caaa70b840bb2f38e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 657.557957] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8982c4589b924c1caaa70b840bb2f38e [ 657.569084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.985604] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "9a1c4327-64b3-4c4d-b6ae-77959084b405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.986376] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "9a1c4327-64b3-4c4d-b6ae-77959084b405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.053527] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 0e018d70-d6dd-4f79-bb03-14b815645562 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.053527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1616d40fd78b4ddab716ee4d14c3219e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 658.066808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1616d40fd78b4ddab716ee4d14c3219e [ 658.316349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "a6ec5486-0843-4c38-b187-35d5296965a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.316586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "a6ec5486-0843-4c38-b187-35d5296965a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.555135] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance c9b2ced5-a77c-4bff-b115-ce5c523be630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.558302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg b063ce4f2e39432488a851a51efe2706 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 658.566379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b063ce4f2e39432488a851a51efe2706 [ 659.061013] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 26a287d7-4602-4d83-8828-41870a49c343 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.061724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg cd0f3e758bc54172a12b94bd0b7dae7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 659.072151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd0f3e758bc54172a12b94bd0b7dae7f [ 659.565240] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.565812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 153fe3b2896c43b781b7aabdcbf0f264 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 659.575949] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 153fe3b2896c43b781b7aabdcbf0f264 [ 660.068397] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 29715a53-7a71-4708-b522-e678fe5bd6a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.068942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f46aa22ae3de483e87b15753b5ebdd7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 660.079449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f46aa22ae3de483e87b15753b5ebdd7d [ 660.574537] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 446bd52c-4ffb-4e77-89fb-3e8535ceb4af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.574537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 66bf1af4289f47d293abfd9f27ece095 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 660.584442] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66bf1af4289f47d293abfd9f27ece095 [ 661.077035] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 6f8e35f3-4b35-449c-9e60-1e0624f41cd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.077035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1d215cf94a4a44ad9bce7c06673a8263 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 661.087595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d215cf94a4a44ad9bce7c06673a8263 [ 661.580031] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance c9a6d28b-52f8-4636-886a-c74f0900e761 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.580467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg b05fde514c3240d987465a3e43da7890 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 661.592110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b05fde514c3240d987465a3e43da7890 [ 662.082963] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.083634] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 96d33cb7a7e5465686caf6ab19fb9239 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 662.094494] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d33cb7a7e5465686caf6ab19fb9239 [ 662.586831] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 9f77d364-928f-4595-9253-8bb216b9215b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.587257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f7429af677204517847d18eb5ba32024 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 662.599700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7429af677204517847d18eb5ba32024 [ 663.090293] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 832c9ce1-6344-485a-a9ef-6950d1c78ef9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.090915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg b9009ba582e548b0b2a9ff96f58bf769 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 663.117078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9009ba582e548b0b2a9ff96f58bf769 [ 663.223506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquiring lock "c7ec619c-1b00-4d58-a593-671c0139c4e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.223823] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "c7ec619c-1b00-4d58-a593-671c0139c4e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.592904] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3d99c7df-b031-4187-988c-f642f79073d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.594094] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ec2fb1fb0b3a444fbd28136395e92cbc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 663.608727] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec2fb1fb0b3a444fbd28136395e92cbc [ 664.102092] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 87304cf6-e65f-41de-ab6f-d2170aaa9064 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.102749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg d8672679761c4bc8a9c0ebc6ddbdc18b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 664.113633] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8672679761c4bc8a9c0ebc6ddbdc18b [ 664.605301] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 90c50f92-c1ff-4ac9-a819-ae0083884e28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.611831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1f84c9d7c88a4d79b76c33f42aebf542 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 664.616356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f84c9d7c88a4d79b76c33f42aebf542 [ 665.108632] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance ae026dca-dc05-4710-8a03-4e792a0dc61d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.109243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg e66dd592abeb45dbbe20d2b3ec79cec5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 665.125624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e66dd592abeb45dbbe20d2b3ec79cec5 [ 665.612076] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 16b04a1b-0ab3-4386-a1eb-74ef3e46a553 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.612678] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 496559f35d0a4dfa90c89bd0fcd82954 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 665.623431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 496559f35d0a4dfa90c89bd0fcd82954 [ 666.115281] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 08638aac-2c6c-4580-9894-6b3b3c1ec484 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.115945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 50046fcd616c40efbc5cbb57073c1a38 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 666.126505] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50046fcd616c40efbc5cbb57073c1a38 [ 666.618891] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 66a0a424-ecb6-43df-9b47-946ff1e1b7b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.619219] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 666.619361] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 667.039633] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b0efb4-3948-4d56-8610-2136d58f147c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.050113] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82a8cce-fa5b-4424-87f7-98e536a1e500 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.080602] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fac0754-40b3-4e27-90c4-f2b466857530 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.089409] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106523e6-b6fb-4045-9326-75e43b7075ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.102963] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.103465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 587b5e6867434020b5663a281b8edc57 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 667.111417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587b5e6867434020b5663a281b8edc57 [ 667.606379] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.608619] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg fe1c7953686f4c9bb62f1081935b8254 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 667.620326] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe1c7953686f4c9bb62f1081935b8254 [ 668.112036] 
env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 668.112036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.854s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.112036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.658s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.113499] env[62109]: INFO nova.compute.claims [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.115030] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 0ac678d0cc274e35bdddc13b171d7568 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 668.158287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ac678d0cc274e35bdddc13b171d7568 [ 668.619409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 24680f90dc5748b5a546fa36b00f0599 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 668.627499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24680f90dc5748b5a546fa36b00f0599 [ 669.453045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b973f50-b3a2-40bf-bc7d-bea6f85afc84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.460903] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9c05c4-9a11-4c5a-8f52-70eb975e2b37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.492775] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3ca801-7d11-4065-8d13-6ab28c4d714e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.500669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46407ea-c595-4f47-b0d9-368d41ca1807 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.514982] env[62109]: DEBUG nova.compute.provider_tree [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 
tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.515594] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 740ddda46e03493ebfd6ecd830e8ef76 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 669.523923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 740ddda46e03493ebfd6ecd830e8ef76 [ 670.017992] env[62109]: DEBUG nova.scheduler.client.report [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.020600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg e3074229a73242cc9ca5c17a607d5e03 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 670.031266] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3074229a73242cc9ca5c17a607d5e03 [ 670.522916] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.523459] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.525135] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 878d3e7709e54fe09b6f2404f0008b7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 670.526324] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.029s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.528206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg aeca213335a14abc9a579e54464d716f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 670.555910] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 878d3e7709e54fe09b6f2404f0008b7d [ 670.559883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeca213335a14abc9a579e54464d716f [ 671.031382] env[62109]: DEBUG nova.compute.utils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.032051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7654300f5d9a4f4fa546ca125215eead in queue reply_7522b64acfeb4981b1f36928b040d568 [ 671.037130] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 671.037319] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 671.048745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7654300f5d9a4f4fa546ca125215eead [ 671.221808] env[62109]: DEBUG nova.policy [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6349e1aff7d945a6a471b1f4e826b23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a866168186462d9d849072a1ff25f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 671.447554] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4157fe1-2f6f-40fc-95be-56825e0be95f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.455054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76bb95a-21fb-49ee-ac7f-5156c162e466 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.485031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6c0f33-91b6-4db4-89d2-3989d356dd4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.492316] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05059f35-4542-4286-a83a-76fc5976e17d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.510837] env[62109]: DEBUG nova.compute.provider_tree [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.511360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 5f687eb87d1b4133b28194efcf1b3ae2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 671.519576] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f687eb87d1b4133b28194efcf1b3ae2 [ 671.528430] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Successfully created port: 
d1298831-ff8d-4c39-806b-3bb7c2472352 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.541126] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 671.542896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg c5c3ce36070e414dbc652a205a59e440 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 671.572156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5c3ce36070e414dbc652a205a59e440 [ 672.020600] env[62109]: DEBUG nova.scheduler.client.report [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.023140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg b290ce3775374359897debcde0710838 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.034316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b290ce3775374359897debcde0710838 [ 672.047020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7482e5173e45439da79b918320aaed4e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.077106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7482e5173e45439da79b918320aaed4e [ 672.252476] env[62109]: DEBUG nova.compute.manager [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Received event network-changed-d1298831-ff8d-4c39-806b-3bb7c2472352 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 672.254000] env[62109]: DEBUG nova.compute.manager [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Refreshing instance network info cache due to event network-changed-d1298831-ff8d-4c39-806b-3bb7c2472352. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 672.254233] env[62109]: DEBUG oslo_concurrency.lockutils [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] Acquiring lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.254370] env[62109]: DEBUG oslo_concurrency.lockutils [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] Acquired lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.254521] env[62109]: DEBUG nova.network.neutron [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Refreshing network info cache for port d1298831-ff8d-4c39-806b-3bb7c2472352 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.254950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] Expecting reply to msg 67e28c631e204ba58cf02e847b716ae2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.261824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67e28c631e204ba58cf02e847b716ae2 [ 672.461377] env[62109]: ERROR nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. 
[ 672.461377] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.461377] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.461377] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.461377] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.461377] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.461377] env[62109]: ERROR nova.compute.manager raise self.value [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.461377] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 672.461377] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.461377] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 672.461846] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.461846] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 672.461846] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. 
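Annotation: the traceback above shows the failure path for port d1298831-ff8d-4c39-806b-3bb7c2472352: allocate_for_instance() creates/updates the port in Neutron, and _ensure_no_port_binding_failure() (nova/network/neutron.py:294 in this traceback) raises PortBindingFailed once the port comes back with a failed binding. As a hedged illustration only -- simplified stand-ins, not Nova's actual source -- the check amounts to inspecting the port's binding:vif_type, assuming Neutron marks a failed binding as 'binding_failed':

    # Illustrative sketch of the binding check implied by the traceback above.
    # The exception class and helper are simplified stand-ins, not Nova code.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Assumption: a failed binding is reported as binding:vif_type == 'binding_failed';
        # the compute manager turns that into PortBindingFailed so the build is re-scheduled.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port whose binding failed on the Neutron side.
    _ensure_no_port_binding_failure(
        {'id': 'd1298831-ff8d-4c39-806b-3bb7c2472352',
         'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed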
[ 672.461846] env[62109]: ERROR nova.compute.manager [ 672.461846] env[62109]: Traceback (most recent call last): [ 672.461846] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 672.461846] env[62109]: listener.cb(fileno) [ 672.461846] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.461846] env[62109]: result = function(*args, **kwargs) [ 672.461846] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.461846] env[62109]: return func(*args, **kwargs) [ 672.461846] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.461846] env[62109]: raise e [ 672.461846] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.461846] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 672.461846] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.461846] env[62109]: created_port_ids = self._update_ports_for_instance( [ 672.461846] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.461846] env[62109]: with excutils.save_and_reraise_exception(): [ 672.461846] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.461846] env[62109]: self.force_reraise() [ 672.461846] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.461846] env[62109]: raise self.value [ 672.461846] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.461846] env[62109]: updated_port = self._update_port( [ 672.461846] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.461846] env[62109]: _ensure_no_port_binding_failure(port) [ 672.461846] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.461846] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 672.462584] env[62109]: nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. [ 672.462584] env[62109]: Removing descriptor: 16 [ 672.530625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.531267] env[62109]: ERROR nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. 
[ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Traceback (most recent call last): [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.driver.spawn(context, instance, image_meta, [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] vm_ref = self.build_virtual_machine(instance, [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.531267] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] for vif in network_info: [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self._sync_wrapper(fn, *args, **kwargs) [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.wait() [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self[:] = self._gt.wait() [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self._exit_event.wait() [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] result = hub.switch() [ 672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
672.531601] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return self.greenlet.switch() [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] result = function(*args, **kwargs) [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] return func(*args, **kwargs) [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise e [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] nwinfo = self.network_api.allocate_for_instance( [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] created_port_ids = self._update_ports_for_instance( [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] with excutils.save_and_reraise_exception(): [ 672.531960] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] self.force_reraise() [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise self.value [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] updated_port = self._update_port( [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] _ensure_no_port_binding_failure(port) [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] raise exception.PortBindingFailed(port_id=port['id']) [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] nova.exception.PortBindingFailed: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. [ 672.532336] env[62109]: ERROR nova.compute.manager [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] [ 672.532649] env[62109]: DEBUG nova.compute.utils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.533201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.790s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.534670] env[62109]: INFO nova.compute.claims [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.536270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 91e1b00086574181b2718326c4ec8a63 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.537435] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Build of instance fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932 was re-scheduled: Binding failed for port a8f53cf7-372e-482c-94e0-6ed3a6e4442a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.537858] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.538155] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.538302] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquired lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.538458] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.538810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 18ecb85908c34236bbaf1076cddc4353 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.545685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18ecb85908c34236bbaf1076cddc4353 [ 672.549228] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.574671] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91e1b00086574181b2718326c4ec8a63 [ 672.581136] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.581356] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.581507] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.581683] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.581822] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.581963] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.582166] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.582327] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 672.582483] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.582636] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.582798] env[62109]: DEBUG nova.virt.hardware [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.583811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db1ef21-aa72-44bd-9f07-e97e98c719b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.596499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471f21a1-ce2f-4fdb-bfc1-5a4788fd084f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.609562] env[62109]: ERROR nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. 
[ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Traceback (most recent call last): [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] yield resources [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.driver.spawn(context, instance, image_meta, [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] vm_ref = self.build_virtual_machine(instance, [ 672.609562] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] for vif in network_info: [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return self._sync_wrapper(fn, *args, **kwargs) [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.wait() [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self[:] = self._gt.wait() [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return self._exit_event.wait() [ 672.609945] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 672.609945] env[62109]: ERROR 
nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] current.throw(*self._exc) [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] result = function(*args, **kwargs) [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return func(*args, **kwargs) [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise e [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] nwinfo = self.network_api.allocate_for_instance( [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] created_port_ids = self._update_ports_for_instance( [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] with excutils.save_and_reraise_exception(): [ 672.610312] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.force_reraise() [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise self.value [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] updated_port = self._update_port( [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] _ensure_no_port_binding_failure(port) [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise exception.PortBindingFailed(port_id=port['id']) [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. [ 672.610683] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] [ 672.610683] env[62109]: INFO nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Terminating instance [ 672.612090] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.770053] env[62109]: DEBUG nova.network.neutron [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.855031] env[62109]: DEBUG nova.network.neutron [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.855575] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] Expecting reply to msg efc457fcd7de403e950bffc000abf169 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 672.863912] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efc457fcd7de403e950bffc000abf169 [ 673.041689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 70b6c8223f904260ba7ad1b5be9d5a52 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.050391] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70b6c8223f904260ba7ad1b5be9d5a52 [ 673.080759] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.154252] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.154901] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg c3b71e03647644668efb190f63db6006 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.163533] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3b71e03647644668efb190f63db6006 [ 673.358271] env[62109]: DEBUG oslo_concurrency.lockutils [req-e64da7ef-6885-4f7d-92e3-67a6fafc3949 req-7660c009-78fa-466f-8e9e-45518e42674f service nova] Releasing lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.358712] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.359020] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.359512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7aff7a98d2bd4c22a8a4d454927dfbed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.367534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aff7a98d2bd4c22a8a4d454927dfbed [ 673.656988] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Releasing lock "refresh_cache-fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.657227] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.657405] env[62109]: DEBUG nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.657567] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.679327] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.679898] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 79745ce1fae94f96a35da6f22544bb54 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.687765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79745ce1fae94f96a35da6f22544bb54 [ 673.876596] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.926367] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2fa432-d17b-4ee8-8cf8-ad28809ca5a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.933542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd8222b-3671-489b-9a29-5b7fecba2921 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.963696] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.964251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg a1a9a8e0b94b41fd9772951dbdf40653 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.965460] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e8df9-2540-4ff9-b6f5-9532f1caf499 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.972659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae53a025-5c99-42c5-8a24-2b55e7febfaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.976579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1a9a8e0b94b41fd9772951dbdf40653 [ 673.986347] env[62109]: DEBUG nova.compute.provider_tree [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.986815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 04223c09b1c2470b93e656505fccd7fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 673.993884] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04223c09b1c2470b93e656505fccd7fe [ 674.182577] env[62109]: DEBUG nova.network.neutron [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.183285] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 4876f452252b4218ae119cc1853c6972 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 674.191662] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4876f452252b4218ae119cc1853c6972 [ 674.277050] env[62109]: DEBUG nova.compute.manager [req-a86c0c01-4437-41a5-b342-9124935d650c req-7cddce0f-318e-441a-8cb7-126ed84a2373 service nova] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Received event network-vif-deleted-d1298831-ff8d-4c39-806b-3bb7c2472352 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 674.469492] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.469928] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 674.470129] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 674.470454] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64355ecb-db1d-45fd-a485-b1f16a3b74c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.479515] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c652f1b8-ec7d-4eb3-9fbd-dae7fc1c5c6f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.489803] env[62109]: DEBUG nova.scheduler.client.report [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.492132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg acb4275c8d3b4d03a8b5ba2ee1eb26cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 674.504468] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
535045d9-108e-4e88-82f0-9da98f2f55a6 could not be found. [ 674.504714] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.504901] env[62109]: INFO nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 674.505141] env[62109]: DEBUG oslo.service.loopingcall [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.505367] env[62109]: DEBUG nova.compute.manager [-] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 674.505461] env[62109]: DEBUG nova.network.neutron [-] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.507659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acb4275c8d3b4d03a8b5ba2ee1eb26cc [ 674.519090] env[62109]: DEBUG nova.network.neutron [-] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.519554] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ad09432e55ee4e2e9b8dec11aac9541f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 674.526235] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad09432e55ee4e2e9b8dec11aac9541f [ 674.687190] env[62109]: INFO nova.compute.manager [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932] Took 1.03 seconds to deallocate network for instance. 
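Annotation: the entries from "Terminating instance" through "Took 1.03 seconds to deallocate network for instance" trace the standard cleanup after a failed build: destroy whatever reached the hypervisor (a no-op here, hence the InstanceNotFound warning), deallocate the network, and abort the resource-tracker claim so the host's capacity is returned before the instance is re-scheduled or put into ERROR. A highly simplified sketch of that claim/cleanup pattern, using hypothetical names rather than Nova's real classes or call signatures:

    # Hypothetical sketch of the claim -> build -> abort-on-failure flow seen in the log;
    # ResourceClaim/build_instance and their arguments are illustrative, not Nova's API.
    class ResourceClaim:
        def __init__(self, tracker, instance):
            self.tracker, self.instance = tracker, instance

        def __enter__(self):
            self.tracker.claim(self.instance)        # "Claim successful on node ..."
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc_type is not None:
                self.tracker.abort(self.instance)    # "abort_instance_claim" in the log
            return False                             # re-raise so the build is re-scheduled

    def build_instance(tracker, network_api, driver, instance):
        with ResourceClaim(tracker, instance):
            ports = network_api.allocate_for_instance(instance)  # may raise PortBindingFailed
            try:
                driver.spawn(instance, ports)
            except Exception:
                driver.destroy(instance)                          # tolerate "not found on backend"
                network_api.deallocate_for_instance(instance)
                raise                                             # surfaces to the conductor/scheduler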
[ 674.687190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg c26ea9c576a94136adbf1f2a6d85bc55 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 674.720187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c26ea9c576a94136adbf1f2a6d85bc55 [ 674.995286] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.995704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg b3ed0764831c4beabd917bb047733bbb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 674.996618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.958s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.998548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 6d9852d732a343258d036910b92e5eb8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 675.012511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3ed0764831c4beabd917bb047733bbb [ 675.022329] env[62109]: DEBUG nova.network.neutron [-] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.022787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 78589fe527a44d1b83fc3a09e1155c37 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 675.029470] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78589fe527a44d1b83fc3a09e1155c37 [ 675.029973] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d9852d732a343258d036910b92e5eb8 [ 675.191880] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 47d9d40cfd134be3a57fe64df89f8d0a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 675.240238] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47d9d40cfd134be3a57fe64df89f8d0a [ 675.500749] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "d2672e91-c619-4fa0-97cc-339ae750126b" by 
"nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.501075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "d2672e91-c619-4fa0-97cc-339ae750126b" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.501629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg e2a6a13e4c904b7e9176ae706200a980 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 675.506610] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a6a13e4c904b7e9176ae706200a980 [ 675.525174] env[62109]: INFO nova.compute.manager [-] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Took 1.02 seconds to deallocate network for instance. [ 675.527183] env[62109]: DEBUG nova.compute.claims [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 675.527380] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.710317] env[62109]: INFO nova.scheduler.client.report [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Deleted allocations for instance fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932 [ 675.717766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 2b502b0e0f8a4166af40a69e9d6e9928 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 675.733545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b502b0e0f8a4166af40a69e9d6e9928 [ 675.947732] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daddb397-d862-4670-ab9c-68c96f4231b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.956305] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff5be86-ffe3-456b-8c7d-f4ab2ee59458 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.986328] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a24a4ce-9b7a-4edf-a7d9-6302423cbb2f {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.993765] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32f7cdd-a822-4b08-aca4-8810b4588465 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.008503] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "d2672e91-c619-4fa0-97cc-339ae750126b" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.009015] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.010631] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg d53724d0df404bb6809aed721859baab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.012535] env[62109]: DEBUG nova.compute.provider_tree [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.013091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 12104fe6e6e944119c3bbeb8ca116774 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.020093] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12104fe6e6e944119c3bbeb8ca116774 [ 676.040738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d53724d0df404bb6809aed721859baab [ 676.222980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-63a73b02-9dca-4d4a-9b22-3f51d1c4763c tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "fa25ef8c-cb4a-4c5b-8157-3d2e1a2e3932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.413s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.223297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 7637ec18c1cd4a48b56c31334075defb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.233954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7637ec18c1cd4a48b56c31334075defb [ 676.516116] env[62109]: DEBUG nova.compute.utils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 
tempest-ServerGroupTestJSON-850352494-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.516795] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 73ec402b13dc4345bf3c5731bce8b53f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.518195] env[62109]: DEBUG nova.scheduler.client.report [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.520458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 39079c8f340e4845ba3a89a274f1d866 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.521531] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 676.521699] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 676.528322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73ec402b13dc4345bf3c5731bce8b53f [ 676.531606] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39079c8f340e4845ba3a89a274f1d866 [ 676.567259] env[62109]: DEBUG nova.policy [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98ec2a1e4163442a9dfd7ce25bf13d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e587eb843ad3412dbd94b5fde13771b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 676.725854] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 676.727094] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b1af8050e4204af19b6a2d660a1f8d55 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 676.757560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1af8050e4204af19b6a2d660a1f8d55 [ 676.867846] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Successfully created port: 033d9bed-a864-43c3-8c4b-838811ea8db1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.022447] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.024182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 1d514f590153408ea10d83a43628d951 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.028436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.029s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.028436] env[62109]: ERROR nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. 
[ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Traceback (most recent call last): [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.driver.spawn(context, instance, image_meta, [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.028436] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] vm_ref = self.build_virtual_machine(instance, [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] for vif in network_info: [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self._sync_wrapper(fn, *args, **kwargs) [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.wait() [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self[:] = self._gt.wait() [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self._exit_event.wait() [ 677.028709] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] result = hub.switch() [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return self.greenlet.switch() [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] result = function(*args, **kwargs) [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] return func(*args, **kwargs) [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise e [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] nwinfo = self.network_api.allocate_for_instance( [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.029181] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] created_port_ids = self._update_ports_for_instance( [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] with excutils.save_and_reraise_exception(): [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] self.force_reraise() [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise self.value [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] updated_port = self._update_port( [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] _ensure_no_port_binding_failure(port) [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 677.029490] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] raise exception.PortBindingFailed(port_id=port['id']) [ 677.029780] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] nova.exception.PortBindingFailed: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. [ 677.029780] env[62109]: ERROR nova.compute.manager [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] [ 677.029780] env[62109]: DEBUG nova.compute.utils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 677.029780] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.694s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.029780] env[62109]: INFO nova.compute.claims [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.031056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 71be42e3037e4be2bc20a3ff117709bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.032136] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Build of instance 80a15f60-4843-4a59-a6c1-0d5624609672 was re-scheduled: Binding failed for port 137bb12e-27d8-4868-8174-3f7459cea1f9, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 677.032886] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 677.032886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquiring lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.032886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Acquired lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.033068] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.033463] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 593c83f15fc9479abbacd48757b5cfb3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.038863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 593c83f15fc9479abbacd48757b5cfb3 [ 677.057572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d514f590153408ea10d83a43628d951 [ 677.063394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71be42e3037e4be2bc20a3ff117709bb [ 677.249298] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.532188] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 88b40000cb224b228c5bdf54d1f10374 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.539043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 5f0a47a72b1949b086b593d2159d71da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.548723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f0a47a72b1949b086b593d2159d71da [ 677.563982] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 88b40000cb224b228c5bdf54d1f10374 [ 677.569833] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.672415] env[62109]: DEBUG nova.compute.manager [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Received event network-changed-033d9bed-a864-43c3-8c4b-838811ea8db1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 677.672415] env[62109]: DEBUG nova.compute.manager [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Refreshing instance network info cache due to event network-changed-033d9bed-a864-43c3-8c4b-838811ea8db1. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 677.672415] env[62109]: DEBUG oslo_concurrency.lockutils [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] Acquiring lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.672415] env[62109]: DEBUG oslo_concurrency.lockutils [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] Acquired lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.672415] env[62109]: DEBUG nova.network.neutron [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Refreshing network info cache for port 033d9bed-a864-43c3-8c4b-838811ea8db1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 677.672841] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] Expecting reply to msg d354b1569be0422889ed3fa90cc29269 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.678936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d354b1569be0422889ed3fa90cc29269 [ 677.710914] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.711599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 28a38dd23f4249fb843133e15b97cb43 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 677.719891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28a38dd23f4249fb843133e15b97cb43 [ 677.983008] env[62109]: ERROR nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb 
tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. [ 677.983008] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.983008] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.983008] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.983008] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.983008] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.983008] env[62109]: ERROR nova.compute.manager raise self.value [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.983008] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 677.983008] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.983008] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 677.983407] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.983407] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 677.983407] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. 
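Annotation: the PortBindingFailed traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises once Neutron reports that it could not bind port 033d9bed-a864-43c3-8c4b-838811ea8db1. Below is a minimal, self-contained sketch of that check; only the function name, the exception name, and the error message format are taken from the log, while everything else (the 'binding_failed' sentinel value, the standalone exception class, the example port dict) is an assumption added for illustration and not Nova's actual code.

# Simplified stand-in for the check at the bottom of the traceback above.
# Only _ensure_no_port_binding_failure and PortBindingFailed appear in the
# log; the rest is illustrative.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel value


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind by setting binding:vif_type
    # to 'binding_failed'; turning that into an exception is what aborts
    # the build and leads to the re-schedule seen earlier in this log.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    port = {'id': '033d9bed-a864-43c3-8c4b-838811ea8db1',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message format as the ERROR entries above

The bare traceback that follows is the same exception surfacing from the greenthread that ran _allocate_network_async, which is why the frames repeat before "Removing descriptor: 16".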
[ 677.983407] env[62109]: ERROR nova.compute.manager [ 677.983407] env[62109]: Traceback (most recent call last): [ 677.983407] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 677.983407] env[62109]: listener.cb(fileno) [ 677.983407] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.983407] env[62109]: result = function(*args, **kwargs) [ 677.983407] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 677.983407] env[62109]: return func(*args, **kwargs) [ 677.983407] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.983407] env[62109]: raise e [ 677.983407] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.983407] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 677.983407] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 677.983407] env[62109]: created_port_ids = self._update_ports_for_instance( [ 677.983407] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 677.983407] env[62109]: with excutils.save_and_reraise_exception(): [ 677.983407] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.983407] env[62109]: self.force_reraise() [ 677.983407] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.983407] env[62109]: raise self.value [ 677.983407] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 677.983407] env[62109]: updated_port = self._update_port( [ 677.983407] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.983407] env[62109]: _ensure_no_port_binding_failure(port) [ 677.983407] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.983407] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 677.985062] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. [ 677.985062] env[62109]: Removing descriptor: 16 [ 678.036235] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.060054] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.060540] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.060832] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.061169] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.061459] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.062030] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.062456] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.063188] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.063499] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb 
tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.063911] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.064266] env[62109]: DEBUG nova.virt.hardware [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.065232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91170ec6-0011-4ab5-a3fa-77a82cfd9945 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.076203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d023f2-5222-4a7c-adfa-c90d3accf7d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.099392] env[62109]: ERROR nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. 
[ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Traceback (most recent call last): [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] yield resources [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.driver.spawn(context, instance, image_meta, [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] vm_ref = self.build_virtual_machine(instance, [ 678.099392] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] for vif in network_info: [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return self._sync_wrapper(fn, *args, **kwargs) [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.wait() [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self[:] = self._gt.wait() [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return self._exit_event.wait() [ 678.099736] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 678.099736] env[62109]: ERROR 
nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] current.throw(*self._exc) [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] result = function(*args, **kwargs) [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return func(*args, **kwargs) [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise e [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] nwinfo = self.network_api.allocate_for_instance( [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] created_port_ids = self._update_ports_for_instance( [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] with excutils.save_and_reraise_exception(): [ 678.100053] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.force_reraise() [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise self.value [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] updated_port = self._update_port( [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] _ensure_no_port_binding_failure(port) [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise exception.PortBindingFailed(port_id=port['id']) [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. [ 678.100448] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] [ 678.101558] env[62109]: INFO nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Terminating instance [ 678.105068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.202863] env[62109]: DEBUG nova.network.neutron [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.216076] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Releasing lock "refresh_cache-80a15f60-4843-4a59-a6c1-0d5624609672" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.216498] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 678.216840] env[62109]: DEBUG nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.217146] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 678.231759] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.232578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg b6df07de0b4f49efb6d4bce9311e0bfc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 678.241984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6df07de0b4f49efb6d4bce9311e0bfc [ 678.351238] env[62109]: DEBUG nova.network.neutron [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.352030] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] Expecting reply to msg 6d69128c7d244a9a9542ade17e891bee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 678.363718] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d69128c7d244a9a9542ade17e891bee [ 678.468715] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec252e2-1e35-4ad7-b206-acd9cc31d458 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.476717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4290f0e1-d08e-4d43-a05b-69bad3f56a5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.506752] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a073d02a-f8ee-4b04-9dc3-51ed553a43ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.513803] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5788e09-6ec5-4150-b620-60a5045164de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.526703] env[62109]: DEBUG nova.compute.provider_tree [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.527219] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 34fa131bbb3442918cf3a3fa6e2534d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 678.534708] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34fa131bbb3442918cf3a3fa6e2534d0 [ 678.735801] env[62109]: DEBUG nova.network.neutron [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.736433] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg e6dfbbdc6ffa4ca3954fd013d43c2587 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 678.744695] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6dfbbdc6ffa4ca3954fd013d43c2587 [ 678.857447] env[62109]: DEBUG oslo_concurrency.lockutils [req-53079b3b-36e3-4990-9af9-c9bee96f9f99 req-73b590b3-0c66-4a60-8ba0-f9ccdb7c135b service nova] Releasing lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.857957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquired lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.858167] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 678.858606] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 5cb14599b48f4943bfe5769eb44ab704 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 678.865314] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cb14599b48f4943bfe5769eb44ab704 [ 679.029813] env[62109]: DEBUG nova.scheduler.client.report [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.032272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg f01962b1c7c245f683d46bbd514ce79a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.043399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f01962b1c7c245f683d46bbd514ce79a [ 679.238428] env[62109]: INFO nova.compute.manager [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] [instance: 80a15f60-4843-4a59-a6c1-0d5624609672] Took 1.02 seconds to deallocate 
network for instance. [ 679.240213] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg d838d088627e459e82d78e87d05f2aa9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.283875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d838d088627e459e82d78e87d05f2aa9 [ 679.352513] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "a04d014e-bed6-4e4b-a5eb-316d88c174f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.352799] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "a04d014e-bed6-4e4b-a5eb-316d88c174f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.375525] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.461773] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.462356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 3f01965ec8764c70b058be4a55aadc0a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.470244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f01965ec8764c70b058be4a55aadc0a [ 679.534344] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.534954] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 679.536670] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 88f7a7d7018e46449915c9a50ec454bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.537692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.131s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.553149] env[62109]: INFO nova.compute.claims [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.554732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 41ba9b653c514e73ba1044ea905fbaee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.573408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88f7a7d7018e46449915c9a50ec454bf [ 679.586607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41ba9b653c514e73ba1044ea905fbaee [ 679.696376] env[62109]: DEBUG nova.compute.manager [req-5a97d1e1-cda0-4fe6-815b-232178d22281 req-8faad76e-1b5b-4948-bbc5-a172f0016140 service nova] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Received event network-vif-deleted-033d9bed-a864-43c3-8c4b-838811ea8db1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 679.746204] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 1d2e858857c1434489ba708793c290d1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 679.781355] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d2e858857c1434489ba708793c290d1 [ 679.964342] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Releasing lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.964778] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.964978] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 679.965275] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ece538bb-eae0-4d29-aebc-569a670bf67d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.974913] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb4e57c-7843-476b-827b-2a77a72d0d33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.996011] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 83a25ff9-cc7d-4917-95cc-e621884bcee8 could not be found. [ 679.996266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 679.996447] env[62109]: INFO nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 679.996686] env[62109]: DEBUG oslo.service.loopingcall [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.996905] env[62109]: DEBUG nova.compute.manager [-] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.996999] env[62109]: DEBUG nova.network.neutron [-] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.013017] env[62109]: DEBUG nova.network.neutron [-] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.014272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b3c40e50e9704cf68e752fc98347d246 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.021043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3c40e50e9704cf68e752fc98347d246 [ 680.058597] env[62109]: DEBUG nova.compute.utils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 680.058597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 92498161cf87445599ed8ec2f01d31a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.070762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg f3e93b4151cb435b9794d1edaa61e705 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.073571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92498161cf87445599ed8ec2f01d31a5 [ 680.074013] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 680.074174] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 680.078312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3e93b4151cb435b9794d1edaa61e705 [ 680.134695] env[62109]: DEBUG nova.policy [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13ba568bd2274469b24604d938cd3e9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0564c0961c84ae29d228b3b98cc113a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 680.281335] env[62109]: INFO nova.scheduler.client.report [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Deleted allocations for instance 80a15f60-4843-4a59-a6c1-0d5624609672 [ 680.297333] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Expecting reply to msg 67c01b9764cd4be6922465671cc5b881 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.316451] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67c01b9764cd4be6922465671cc5b881 [ 680.514183] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Successfully created port: 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.516323] env[62109]: DEBUG nova.network.neutron [-] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.516701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b93ae62fd8c441bb9183e915154899c7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.525054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93ae62fd8c441bb9183e915154899c7 [ 680.572536] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 680.574208] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 79dd04bd21d74528ae0a253a14bf8e26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.610391] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79dd04bd21d74528ae0a253a14bf8e26 [ 680.803066] env[62109]: DEBUG oslo_concurrency.lockutils [None req-313e25df-76e7-442e-b13f-292f73b86b63 tempest-InstanceActionsTestJSON-1004074533 tempest-InstanceActionsTestJSON-1004074533-project-member] Lock "80a15f60-4843-4a59-a6c1-0d5624609672" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.762s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.803635] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg e53014077d1a49ab983554cd66326887 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 680.814450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e53014077d1a49ab983554cd66326887 [ 680.987697] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ab8b8c-b44d-46dd-9ce2-c42ea7148843 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.987697] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1842e327-a676-46c6-89b2-8b2a0c50f5fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.015655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ab313c-f650-455a-9074-de5acb0458c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.018321] env[62109]: INFO nova.compute.manager [-] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Took 1.02 seconds to deallocate network for instance. 
[ 681.020639] env[62109]: DEBUG nova.compute.claims [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 681.020838] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.025613] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb1cafb-8938-40c8-a4d4-0f2277d22acd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.039558] env[62109]: DEBUG nova.compute.provider_tree [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.040107] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 54284be644a44dd8a1d0d1a99437379d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 681.048220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54284be644a44dd8a1d0d1a99437379d [ 681.078409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 833ed645fa0f4722a044b0010f7c35b7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 681.116308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 833ed645fa0f4722a044b0010f7c35b7 [ 681.306134] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 681.307963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 7ad6abf4e63043c195d3f4a719a2f4ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 681.343353] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ad6abf4e63043c195d3f4a719a2f4ef [ 681.542564] env[62109]: DEBUG nova.scheduler.client.report [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.545041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg fe7f2ba1381645e3aa2a027b8d2c4605 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 681.556487] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe7f2ba1381645e3aa2a027b8d2c4605 [ 681.581201] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 681.608322] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.608574] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.608725] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.608900] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.609039] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.609177] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.609376] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.609528] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.609689] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.609844] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.610011] env[62109]: DEBUG nova.virt.hardware [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.610839] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2701cb90-f94a-40ce-bf6f-e982949710e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.618534] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65bf081-7847-4b2b-b8aa-05f1fdc62f5d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.800244] env[62109]: ERROR nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. 
[ 681.800244] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.800244] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.800244] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.800244] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.800244] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.800244] env[62109]: ERROR nova.compute.manager raise self.value [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.800244] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 681.800244] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.800244] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 681.800734] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.800734] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 681.800734] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. 
[ 681.800734] env[62109]: ERROR nova.compute.manager [ 681.800734] env[62109]: Traceback (most recent call last): [ 681.800734] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 681.800734] env[62109]: listener.cb(fileno) [ 681.800734] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.800734] env[62109]: result = function(*args, **kwargs) [ 681.800734] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.800734] env[62109]: return func(*args, **kwargs) [ 681.800734] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.800734] env[62109]: raise e [ 681.800734] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.800734] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 681.800734] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.800734] env[62109]: created_port_ids = self._update_ports_for_instance( [ 681.800734] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.800734] env[62109]: with excutils.save_and_reraise_exception(): [ 681.800734] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.800734] env[62109]: self.force_reraise() [ 681.800734] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.800734] env[62109]: raise self.value [ 681.800734] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.800734] env[62109]: updated_port = self._update_port( [ 681.800734] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.800734] env[62109]: _ensure_no_port_binding_failure(port) [ 681.800734] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.800734] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 681.801544] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. [ 681.801544] env[62109]: Removing descriptor: 16 [ 681.801544] env[62109]: ERROR nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. 
[ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Traceback (most recent call last): [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] yield resources [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.driver.spawn(context, instance, image_meta, [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.801544] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] vm_ref = self.build_virtual_machine(instance, [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] for vif in network_info: [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self._sync_wrapper(fn, *args, **kwargs) [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.wait() [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self[:] = self._gt.wait() [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self._exit_event.wait() [ 681.801996] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.802439] env[62109]: ERROR 
nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] result = hub.switch() [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self.greenlet.switch() [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] result = function(*args, **kwargs) [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return func(*args, **kwargs) [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise e [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] nwinfo = self.network_api.allocate_for_instance( [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.802439] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] created_port_ids = self._update_ports_for_instance( [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] with excutils.save_and_reraise_exception(): [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.force_reraise() [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise self.value [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] updated_port = self._update_port( [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.802833] 
env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] _ensure_no_port_binding_failure(port) [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.802833] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise exception.PortBindingFailed(port_id=port['id']) [ 681.803151] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. [ 681.803151] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] [ 681.803151] env[62109]: INFO nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Terminating instance [ 681.803705] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquiring lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.803861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquired lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.804039] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.804521] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 53127e3caab345e09328d1c96a89b736 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 681.812227] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53127e3caab345e09328d1c96a89b736 [ 681.820509] env[62109]: DEBUG nova.compute.manager [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Received event network-changed-9b5b0d54-31e6-49e7-a134-cfc1557f1a7c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 681.820700] env[62109]: DEBUG nova.compute.manager [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Refreshing instance network info cache due to event network-changed-9b5b0d54-31e6-49e7-a134-cfc1557f1a7c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 681.820884] env[62109]: DEBUG oslo_concurrency.lockutils [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] Acquiring lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.831301] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.048870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.048870] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 682.050103] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 596582102e7a4f6d91c4dbdef78f369d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.050631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.675s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.053302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 739ea57f031a4425819a99d77a83f7e3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.085781] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596582102e7a4f6d91c4dbdef78f369d [ 682.086554] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 739ea57f031a4425819a99d77a83f7e3 [ 682.322282] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.373857] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.374385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg eac4c690de814b458f4c1752ff375766 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.383758] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eac4c690de814b458f4c1752ff375766 [ 682.559239] env[62109]: DEBUG nova.compute.utils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 682.559854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 5cb247d3447241b3b6362980981546fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.560968] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 682.561141] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 682.573036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cb247d3447241b3b6362980981546fd [ 682.631812] env[62109]: DEBUG nova.policy [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe2d09b691d94a748f19caaf87f70e9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3d972fbe68a485ea9884936e907fae7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 682.876817] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Releasing lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.877388] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 682.877708] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 682.878132] env[62109]: DEBUG oslo_concurrency.lockutils [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] Acquired lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.878574] env[62109]: DEBUG nova.network.neutron [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Refreshing network info cache for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 682.879184] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] Expecting reply to msg 327a6ddec6644b1fa42ce96e78907f45 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.880396] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6f84151-3b9b-4a41-a6e7-102528a5edf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.886246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 327a6ddec6644b1fa42ce96e78907f45 [ 682.891337] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee55762-1229-43c0-841b-c856675139b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.917108] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b537150e-9136-4fa4-b092-4f4995b918b7 could not be found. [ 682.917486] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 682.917772] env[62109]: INFO nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 682.918227] env[62109]: DEBUG oslo.service.loopingcall [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.920827] env[62109]: DEBUG nova.compute.manager [-] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.921041] env[62109]: DEBUG nova.network.neutron [-] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 682.955241] env[62109]: DEBUG nova.network.neutron [-] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.955769] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 25b2ca24f003419987859f3f20935ed0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 682.971362] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25b2ca24f003419987859f3f20935ed0 [ 683.003310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584a618d-b2dd-4d23-9e77-6159567d67f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.011052] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23e2009-9fcc-4c5a-ab43-ae04ae1363d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.040345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4639b536-f5c2-4c04-a076-421d42f6c094 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.045001] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Successfully created port: 8b1944f9-bd34-42ed-a99d-9be9225ffaf1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.051990] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc727615-ab16-4d99-8ad0-3ecba07faf72 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.072040] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 683.073850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg d486e7ac02e94e26ba30adb8590b8325 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.075315] env[62109]: DEBUG nova.compute.provider_tree [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.076057] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg c74dbc2f8ee8481fb24bb2939cb3e6b1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.083441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c74dbc2f8ee8481fb24bb2939cb3e6b1 [ 683.117491] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d486e7ac02e94e26ba30adb8590b8325 [ 683.415409] env[62109]: DEBUG nova.network.neutron [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.463198] env[62109]: DEBUG nova.network.neutron [-] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.463656] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 08498e2f04c64469ab2b10002711d217 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.472478] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08498e2f04c64469ab2b10002711d217 [ 683.478564] env[62109]: DEBUG nova.network.neutron [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.479109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] Expecting reply to msg 7f6148d632114d71978e3705c03c10e3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.487268] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f6148d632114d71978e3705c03c10e3 [ 683.581141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg fb4fa02e52924bd38a274273394b86ed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.582785] env[62109]: DEBUG nova.scheduler.client.report [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 
tempest-ServerExternalEventsTest-1156907895-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.585146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 9ea79906aa634ae7b2ce576703454f3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.598809] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ea79906aa634ae7b2ce576703454f3f [ 683.627542] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb4fa02e52924bd38a274273394b86ed [ 683.847370] env[62109]: DEBUG nova.compute.manager [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Received event network-vif-deleted-9b5b0d54-31e6-49e7-a134-cfc1557f1a7c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 683.847620] env[62109]: DEBUG nova.compute.manager [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Received event network-changed-8b1944f9-bd34-42ed-a99d-9be9225ffaf1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 683.847785] env[62109]: DEBUG nova.compute.manager [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Refreshing instance network info cache due to event network-changed-8b1944f9-bd34-42ed-a99d-9be9225ffaf1. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 683.848082] env[62109]: DEBUG oslo_concurrency.lockutils [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] Acquiring lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.848230] env[62109]: DEBUG oslo_concurrency.lockutils [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] Acquired lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.848390] env[62109]: DEBUG nova.network.neutron [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Refreshing network info cache for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 683.848810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] Expecting reply to msg acbf37d2cf5c42cbb67dc6180efaf199 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 683.858146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acbf37d2cf5c42cbb67dc6180efaf199 [ 683.965660] env[62109]: INFO nova.compute.manager [-] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Took 1.04 seconds to deallocate network for instance. [ 683.968477] env[62109]: DEBUG nova.compute.claims [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 683.968668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.981334] env[62109]: DEBUG oslo_concurrency.lockutils [req-9416edc5-712b-4cc0-b50e-398f3b0d3811 req-cdfa5d45-c8a4-41af-871b-0fb7b18f6d46 service nova] Releasing lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.999845] env[62109]: ERROR nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. 
[ 683.999845] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 683.999845] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 683.999845] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 683.999845] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 683.999845] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 683.999845] env[62109]: ERROR nova.compute.manager raise self.value [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 683.999845] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 683.999845] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 683.999845] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.000611] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.000611] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 684.000611] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. 
[ 684.000611] env[62109]: ERROR nova.compute.manager [ 684.000611] env[62109]: Traceback (most recent call last): [ 684.000611] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.000611] env[62109]: listener.cb(fileno) [ 684.000611] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.000611] env[62109]: result = function(*args, **kwargs) [ 684.000611] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.000611] env[62109]: return func(*args, **kwargs) [ 684.000611] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.000611] env[62109]: raise e [ 684.000611] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.000611] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 684.000611] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 684.000611] env[62109]: created_port_ids = self._update_ports_for_instance( [ 684.000611] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 684.000611] env[62109]: with excutils.save_and_reraise_exception(): [ 684.000611] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.000611] env[62109]: self.force_reraise() [ 684.000611] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.000611] env[62109]: raise self.value [ 684.000611] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 684.000611] env[62109]: updated_port = self._update_port( [ 684.000611] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.000611] env[62109]: _ensure_no_port_binding_failure(port) [ 684.000611] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.000611] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.001960] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. [ 684.001960] env[62109]: Removing descriptor: 16 [ 684.087517] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 684.090146] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.039s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.090748] env[62109]: ERROR nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] Traceback (most recent call last): [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.driver.spawn(context, instance, image_meta, [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] vm_ref = self.build_virtual_machine(instance, [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.090748] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] for vif in network_info: [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self._sync_wrapper(fn, *args, **kwargs) [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.wait() [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.091094] env[62109]: ERROR 
nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self[:] = self._gt.wait() [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self._exit_event.wait() [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] result = hub.switch() [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.091094] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return self.greenlet.switch() [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] result = function(*args, **kwargs) [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] return func(*args, **kwargs) [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise e [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] nwinfo = self.network_api.allocate_for_instance( [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] created_port_ids = self._update_ports_for_instance( [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] with excutils.save_and_reraise_exception(): [ 684.091475] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] self.force_reraise() [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise self.value [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] updated_port = self._update_port( [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] _ensure_no_port_binding_failure(port) [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] raise exception.PortBindingFailed(port_id=port['id']) [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] nova.exception.PortBindingFailed: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. [ 684.091977] env[62109]: ERROR nova.compute.manager [instance: 15e2e743-070f-4545-b976-ced38fd99198] [ 684.092512] env[62109]: DEBUG nova.compute.utils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 684.092838] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.179s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.094604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg e1a82f48d31045848fe1d94046e04305 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 684.096063] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Build of instance 15e2e743-070f-4545-b976-ced38fd99198 was re-scheduled: Binding failed for port 530e4dd8-07d2-480d-9d84-700373a8d44a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 684.096547] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 684.096771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquiring lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.096918] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Acquired lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.097076] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 684.097474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 32c8d656d0a14323a65774278b72eaeb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 684.108071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32c8d656d0a14323a65774278b72eaeb [ 684.123770] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.124043] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.124203] env[62109]: DEBUG nova.virt.hardware [None 
req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.124387] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.124524] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.124660] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.124854] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.124999] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 684.125280] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.125519] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.125608] env[62109]: DEBUG nova.virt.hardware [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.126767] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92b0bde-b11e-4f33-8785-bfd930bf9614 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.134999] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-45628c22-a396-470b-80b3-cf7fb62591d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.139628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1a82f48d31045848fe1d94046e04305 [ 684.150738] env[62109]: ERROR nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Traceback (most recent call last): [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] yield resources [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.driver.spawn(context, instance, image_meta, [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] vm_ref = self.build_virtual_machine(instance, [ 684.150738] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] for vif in network_info: [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return self._sync_wrapper(fn, *args, **kwargs) [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.wait() [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File 
"/opt/stack/nova/nova/network/model.py", line 635, in wait [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self[:] = self._gt.wait() [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return self._exit_event.wait() [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 684.151073] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] current.throw(*self._exc) [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] result = function(*args, **kwargs) [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return func(*args, **kwargs) [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise e [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] nwinfo = self.network_api.allocate_for_instance( [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] created_port_ids = self._update_ports_for_instance( [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] with excutils.save_and_reraise_exception(): [ 684.151516] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.force_reraise() [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise self.value [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: 
fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] updated_port = self._update_port( [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] _ensure_no_port_binding_failure(port) [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise exception.PortBindingFailed(port_id=port['id']) [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. [ 684.152049] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] [ 684.152049] env[62109]: INFO nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Terminating instance [ 684.158940] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquiring lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.376205] env[62109]: DEBUG nova.network.neutron [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.524555] env[62109]: DEBUG nova.network.neutron [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.525202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] Expecting reply to msg 4418c65243cd44099b64532a058ec91e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 684.534794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4418c65243cd44099b64532a058ec91e [ 684.620573] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.673786] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.674343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 747d91b5f154479e85d5a8781344deb4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 684.682851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 747d91b5f154479e85d5a8781344deb4 [ 685.027975] env[62109]: DEBUG oslo_concurrency.lockutils [req-364d23f6-3686-4076-8505-339363fd411c req-097bfca2-6be8-44f9-ac98-f123d7733f8d service nova] Releasing lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.028508] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquired lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.028655] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.029131] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 88e4f86df35d4431b1dbd4d84cc0fc6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.036892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88e4f86df35d4431b1dbd4d84cc0fc6d [ 685.054881] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e15421-f04b-49d2-9369-be29bc32d8c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.064340] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54640cd-d2e1-4528-86a9-7dc99f91d419 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.097752] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd05cae-e9e4-4949-a2b9-19aa4f60e1c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.105632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111d66da-8348-4ff9-87d1-b3c06d4f5d89 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.121421] env[62109]: DEBUG nova.compute.provider_tree [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.122107] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 5fe72527db664c719eafc4a4946aab94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.131553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe72527db664c719eafc4a4946aab94 [ 685.176357] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Releasing lock "refresh_cache-15e2e743-070f-4545-b976-ced38fd99198" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.177569] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 685.177776] env[62109]: DEBUG nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 685.178027] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 685.193883] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.194466] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg a4383be8a711491387c52b38d2880594 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.203821] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4383be8a711491387c52b38d2880594 [ 685.353131] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "b95c60dc-50c4-4afc-acb0-3308e490b808" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.353398] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.552875] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.617124] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "900e1e1e-5635-4782-bd87-046dd2af7dad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.617525] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.624467] env[62109]: DEBUG nova.scheduler.client.report [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
685.627285] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 80a8e456aa31431fa75dc2987d4f1fd5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.638988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80a8e456aa31431fa75dc2987d4f1fd5 [ 685.660910] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.661591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg f04151bdb16b428288046f6ba42ae4dd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.670278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f04151bdb16b428288046f6ba42ae4dd [ 685.696959] env[62109]: DEBUG nova.network.neutron [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.697629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg b4defc8ba9dd4bc3a08661dc7ef0770e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 685.706324] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4defc8ba9dd4bc3a08661dc7ef0770e [ 685.879262] env[62109]: DEBUG nova.compute.manager [req-159dad56-6e48-4a34-9750-19a689ff12f3 req-8b701881-8c3c-4754-b333-4ecd9d68f0ba service nova] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Received event network-vif-deleted-8b1944f9-bd34-42ed-a99d-9be9225ffaf1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 686.148109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.038s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.148109] env[62109]: ERROR nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. 
[ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Traceback (most recent call last): [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.driver.spawn(context, instance, image_meta, [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 686.148109] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] vm_ref = self.build_virtual_machine(instance, [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] vif_infos = vmwarevif.get_vif_info(self._session, [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] for vif in network_info: [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self._sync_wrapper(fn, *args, **kwargs) [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.wait() [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self[:] = self._gt.wait() [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self._exit_event.wait() [ 686.148843] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] result = hub.switch() [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return self.greenlet.switch() [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] result = function(*args, **kwargs) [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] return func(*args, **kwargs) [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise e [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] nwinfo = self.network_api.allocate_for_instance( [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 686.149203] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] created_port_ids = self._update_ports_for_instance( [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] with excutils.save_and_reraise_exception(): [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] self.force_reraise() [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise self.value [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] updated_port = self._update_port( [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] _ensure_no_port_binding_failure(port) [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 686.149574] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] raise exception.PortBindingFailed(port_id=port['id']) [ 686.149878] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] nova.exception.PortBindingFailed: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. [ 686.149878] env[62109]: ERROR nova.compute.manager [instance: 2fa640c2-b433-4581-be4b-0673c1451043] [ 686.149878] env[62109]: DEBUG nova.compute.utils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.149878] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.056s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.149878] env[62109]: INFO nova.compute.claims [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.149878] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg e52df6a128624e9b935c64a78851e5ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.152964] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Build of instance 2fa640c2-b433-4581-be4b-0673c1451043 was re-scheduled: Binding failed for port 59e1287a-ba70-419b-a589-4ff8cd85c074, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 686.152964] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 686.152964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquiring lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.152964] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Acquired lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.153295] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.153295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg f97ad67258d542bf8f1846dda5cea21b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.161659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f97ad67258d542bf8f1846dda5cea21b [ 686.170429] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Releasing lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.171002] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 686.171306] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 686.171911] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f14873d-f581-4398-963d-1c42fe685783 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.176248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e52df6a128624e9b935c64a78851e5ce [ 686.181004] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e21c52-846c-447e-b463-eeb969afbd1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.203503] env[62109]: INFO nova.compute.manager [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] [instance: 15e2e743-070f-4545-b976-ced38fd99198] Took 1.03 seconds to deallocate network for instance. [ 686.205497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 5f5b9ad32c7648b582ba9b6cf92dd85f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.206788] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe9756ba-0eb5-41ad-913f-e933f97542cb could not be found. [ 686.207109] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 686.207400] env[62109]: INFO nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 686.207755] env[62109]: DEBUG oslo.service.loopingcall [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.208485] env[62109]: DEBUG nova.compute.manager [-] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.208704] env[62109]: DEBUG nova.network.neutron [-] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.222563] env[62109]: DEBUG nova.network.neutron [-] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.223241] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4b48e98d83d64311ac8d5d664c4b450a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.229846] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b48e98d83d64311ac8d5d664c4b450a [ 686.241523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f5b9ad32c7648b582ba9b6cf92dd85f [ 686.653791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg f4263a6c201b4997b7c0019b9e25290f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.662824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4263a6c201b4997b7c0019b9e25290f [ 686.672861] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.714546] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg b3ca84e103bd4842a3e66249d697281a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.725576] env[62109]: DEBUG nova.network.neutron [-] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.726218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1fffb6afeaa247e18d306301905fbeb2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.735801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fffb6afeaa247e18d306301905fbeb2 [ 686.739768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3ca84e103bd4842a3e66249d697281a [ 686.756462] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.757104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 33ddd903211a47bab330aa8fdd2583ff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 686.764621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33ddd903211a47bab330aa8fdd2583ff [ 687.229547] env[62109]: INFO nova.compute.manager [-] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Took 1.02 seconds to deallocate network for instance. 
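The "Binding failed for port 59e1287a-..." traceback above bottoms out in _ensure_no_port_binding_failure in nova/network/neutron.py, which raises PortBindingFailed when Neutron reports that it could not bind the port. The snippet below is a minimal, self-contained sketch of that check, not the Nova source: the exception class and the 'binding_failed' vif-type constant are reimplemented locally, on the assumption that Neutron marks a failed binding by setting the port's binding:vif_type attribute to 'binding_failed'.

# Illustrative sketch of the port-binding check seen in the traceback above.
# Names are local stand-ins, not the actual Nova imports.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = "binding_failed"   # assumed Neutron marker value

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported a failed binding for this port."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

# Example with the port id taken from the log:
port = {"id": "59e1287a-ba70-419b-a589-4ff8cd85c074",
        "binding:vif_type": VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

When this fires during _update_ports_for_instance, the compute manager marks the build as failed and, as the records above show, re-schedules the instance and deallocates its networking.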
[ 687.230288] env[62109]: DEBUG nova.compute.claims [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 687.230605] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.232485] env[62109]: INFO nova.scheduler.client.report [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Deleted allocations for instance 15e2e743-070f-4545-b976-ced38fd99198 [ 687.240591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Expecting reply to msg 687798b0b1f84d5494b9af8dc81516b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 687.258735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Releasing lock "refresh_cache-2fa640c2-b433-4581-be4b-0673c1451043" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.259150] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 687.259455] env[62109]: DEBUG nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 687.259747] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 687.262867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687798b0b1f84d5494b9af8dc81516b6 [ 687.275464] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.276295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 97c9b67710d54d78925e09b3f1211870 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 687.290766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97c9b67710d54d78925e09b3f1211870 [ 687.588249] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46c61e5-1279-42a4-bfe7-5c10168e27e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.595710] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6544473-33e3-4c5d-811b-a9da443d1827 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.625541] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8cc024-259d-436d-94f4-271292b4e327 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.632657] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80faa826-6198-4160-9acc-63893db0458e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.646396] env[62109]: DEBUG nova.compute.provider_tree [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.646902] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 1436b93562ac45338d5e38ec3d219245 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 687.653701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1436b93562ac45338d5e38ec3d219245 [ 687.744558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39d251b7-0311-4e19-ba29-53bcbff7e5f0 tempest-ServerExternalEventsTest-1156907895 tempest-ServerExternalEventsTest-1156907895-project-member] Lock "15e2e743-070f-4545-b976-ced38fd99198" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.570s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.745164] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 9141b63135244cc5afb571297cdc1ab3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 687.755017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9141b63135244cc5afb571297cdc1ab3 [ 687.784034] env[62109]: DEBUG nova.network.neutron [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e 
tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.784034] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg ea3998223e654192b98837c4d0efadfc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 687.791300] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea3998223e654192b98837c4d0efadfc [ 688.149653] env[62109]: DEBUG nova.scheduler.client.report [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 688.152079] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg abc1fdcaf2114b8cbf284e2b2bc4eab7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.163784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abc1fdcaf2114b8cbf284e2b2bc4eab7 [ 688.247234] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 688.249090] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 5f1535f3f5bf40c9bf8f4d0bba80439e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.285807] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f1535f3f5bf40c9bf8f4d0bba80439e [ 688.286696] env[62109]: INFO nova.compute.manager [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] [instance: 2fa640c2-b433-4581-be4b-0673c1451043] Took 1.03 seconds to deallocate network for instance. 
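The oslo_concurrency.lockutils lines above record two timings per named lock: how long the caller waited to acquire it (e.g. "waited 31.056s" on "compute_resources") and how long it was held (e.g. "held 94.570s" on the per-instance build lock). The following is an illustrative stand-in built on threading, not oslo.concurrency itself, showing how that waited/held accounting can be produced; the lock name and caller string are copied from the log only for flavor.

# Sketch of named-lock wait/held accounting, assuming a simple in-process
# registry of threading.Lock objects (not the oslo.concurrency implementation).
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def named_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

if __name__ == "__main__":
    with named_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
        time.sleep(0.05)  # stand-in for the resource claim work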
[ 688.288185] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 0f3e90f55cdf402a83ed3350f210e494 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.320734] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f3e90f55cdf402a83ed3350f210e494 [ 688.656046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.656046] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 688.656786] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 706c4196bf6e421893f68eb52cfa8c5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.657843] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.089s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.659265] env[62109]: INFO nova.compute.claims [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.660867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg cd4ba810aede48c1949ebb5c3ed799ab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.697558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 706c4196bf6e421893f68eb52cfa8c5f [ 688.697558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd4ba810aede48c1949ebb5c3ed799ab [ 688.771067] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.793227] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 
tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 74fdecaf26b04964aa21690dd7b5a73a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 688.824714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74fdecaf26b04964aa21690dd7b5a73a [ 689.164460] env[62109]: DEBUG nova.compute.utils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.165108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 8e6cb1f9825044438261546ac3fabbcc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 689.167595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 87b91c4944514352b0e77f2ce77652da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 689.168103] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 689.168277] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 689.174675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e6cb1f9825044438261546ac3fabbcc [ 689.180919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b91c4944514352b0e77f2ce77652da [ 689.219143] env[62109]: DEBUG nova.policy [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69c1f9cac1b84ea18a4065e389324d97', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4678fb5a620741c1b27309ff6f4597eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 689.321134] env[62109]: INFO nova.scheduler.client.report [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Deleted allocations for instance 2fa640c2-b433-4581-be4b-0673c1451043 [ 689.327316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 
tempest-AttachInterfacesV270Test-560058959-project-member] Expecting reply to msg 0c98d1ce76124bbca5132ef81105c31a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 689.344473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c98d1ce76124bbca5132ef81105c31a [ 689.530173] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Successfully created port: b1bc0a4e-9fde-485b-9637-ec9f81514cd0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.669538] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 689.671398] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 110c2f5d824b4e3888a30bb7dac42cfb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 689.705986] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 110c2f5d824b4e3888a30bb7dac42cfb [ 689.829628] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0db3a869-c29c-41b3-92ea-8e7694544b1e tempest-AttachInterfacesV270Test-560058959 tempest-AttachInterfacesV270Test-560058959-project-member] Lock "2fa640c2-b433-4581-be4b-0673c1451043" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.553s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.830265] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 83553e28ec5e413d98e9910d5bf23c87 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 689.840689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83553e28ec5e413d98e9910d5bf23c87 [ 690.096376] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940a17e7-406e-4487-9191-2557a87ea7a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.104793] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1362439-f5ba-44ce-b7f4-e40dc07053b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.140537] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52de2ae-5529-44ae-a369-9592823cd910 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.145020] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8579b60-12a4-4a5e-b1fc-17d2669faf9d {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.158963] env[62109]: DEBUG nova.compute.provider_tree [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.159513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg e6a63711c3524d319c47014b89a122ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 690.167937] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6a63711c3524d319c47014b89a122ad [ 690.185784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 92fae05663bc447cac300d9164f1b423 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 690.226790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92fae05663bc447cac300d9164f1b423 [ 690.333160] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 690.334869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 61fdadd8ff694cc3ae31c29b9ec5ffa2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 690.366568] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61fdadd8ff694cc3ae31c29b9ec5ffa2 [ 690.411794] env[62109]: DEBUG nova.compute.manager [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Received event network-changed-b1bc0a4e-9fde-485b-9637-ec9f81514cd0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 690.412083] env[62109]: DEBUG nova.compute.manager [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Refreshing instance network info cache due to event network-changed-b1bc0a4e-9fde-485b-9637-ec9f81514cd0. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 690.412231] env[62109]: DEBUG oslo_concurrency.lockutils [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] Acquiring lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.412380] env[62109]: DEBUG oslo_concurrency.lockutils [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] Acquired lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.412528] env[62109]: DEBUG nova.network.neutron [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Refreshing network info cache for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 690.413019] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] Expecting reply to msg cc0bbfc2f2094c3f9be99ae5a66ef706 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 690.420874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc0bbfc2f2094c3f9be99ae5a66ef706 [ 690.641717] env[62109]: ERROR nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. 
[ 690.641717] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.641717] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.641717] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.641717] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.641717] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.641717] env[62109]: ERROR nova.compute.manager raise self.value [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.641717] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 690.641717] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.641717] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 690.642214] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.642214] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 690.642214] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. 
[ 690.642214] env[62109]: ERROR nova.compute.manager [ 690.642214] env[62109]: Traceback (most recent call last): [ 690.642214] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 690.642214] env[62109]: listener.cb(fileno) [ 690.642214] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.642214] env[62109]: result = function(*args, **kwargs) [ 690.642214] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 690.642214] env[62109]: return func(*args, **kwargs) [ 690.642214] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.642214] env[62109]: raise e [ 690.642214] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.642214] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 690.642214] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.642214] env[62109]: created_port_ids = self._update_ports_for_instance( [ 690.642214] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.642214] env[62109]: with excutils.save_and_reraise_exception(): [ 690.642214] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.642214] env[62109]: self.force_reraise() [ 690.642214] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.642214] env[62109]: raise self.value [ 690.642214] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.642214] env[62109]: updated_port = self._update_port( [ 690.642214] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.642214] env[62109]: _ensure_no_port_binding_failure(port) [ 690.642214] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.642214] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 690.642992] env[62109]: nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. 
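The two tracebacks above show the same failure from both sides: the port binding for b1bc0a4e-9fde-485b-9637-ec9f81514cd0 fails inside _allocate_network_async, which runs in a background greenthread, and the exception only reaches the compute manager when the VMware driver first iterates network_info during spawn (the _sync_wrapper/wait path in nova/network/model.py). The sketch below mimics that deferred-exception pattern with concurrent.futures instead of eventlet; the class and function names are illustrative stand-ins, not the Nova implementation.

# Sketch of deferred exception delivery from an async network allocation,
# assuming a futures-based stand-in for the eventlet greenthread.
from concurrent.futures import ThreadPoolExecutor

class PortBindingFailed(Exception):
    pass

class NetworkInfoAsync:
    """Stand-in for the async network_info wrapper: iterating it blocks on
    the background allocation and re-raises any exception it produced."""
    def __init__(self, future):
        self._future = future
    def __iter__(self):
        return iter(self._future.result())

def allocate_for_instance(port_id):
    # Stand-in for the Neutron allocation hitting a failed binding.
    raise PortBindingFailed(
        f"Binding failed for port {port_id}, "
        "please check neutron logs for more information.")

with ThreadPoolExecutor(max_workers=1) as pool:
    network_info = NetworkInfoAsync(
        pool.submit(allocate_for_instance,
                    "b1bc0a4e-9fde-485b-9637-ec9f81514cd0"))
    try:
        for vif in network_info:   # spawn-time iteration surfaces the error
            pass
    except PortBindingFailed as exc:
        print("Instance failed to spawn:", exc)

This is why the "Instance failed to spawn" error that follows is reported from build_virtual_machine rather than from the network allocation itself.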
[ 690.642992] env[62109]: Removing descriptor: 16 [ 690.662071] env[62109]: DEBUG nova.scheduler.client.report [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.664543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 3e979c87d2324800a7c85aadc0042911 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 690.677455] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e979c87d2324800a7c85aadc0042911 [ 690.689122] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 690.716909] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.717059] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.717211] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.717384] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 
tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.717523] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.717663] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.717864] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.718065] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.718297] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.718387] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.718553] env[62109]: DEBUG nova.virt.hardware [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.719432] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f042bcc-ab05-4cc0-ad48-72378e96f743 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.726689] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54afa8e1-a48a-4140-bd08-62da67f45f1c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.742984] env[62109]: ERROR nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance failed to 
spawn: nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Traceback (most recent call last): [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] yield resources [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.driver.spawn(context, instance, image_meta, [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] vm_ref = self.build_virtual_machine(instance, [ 690.742984] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] vif_infos = vmwarevif.get_vif_info(self._session, [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] for vif in network_info: [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return self._sync_wrapper(fn, *args, **kwargs) [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.wait() [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self[:] = self._gt.wait() [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return self._exit_event.wait() [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 
11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 690.743361] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] current.throw(*self._exc) [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] result = function(*args, **kwargs) [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return func(*args, **kwargs) [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise e [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] nwinfo = self.network_api.allocate_for_instance( [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] created_port_ids = self._update_ports_for_instance( [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] with excutils.save_and_reraise_exception(): [ 690.743710] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.force_reraise() [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise self.value [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] updated_port = self._update_port( [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] _ensure_no_port_binding_failure(port) [ 690.744069] env[62109]: ERROR 
nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise exception.PortBindingFailed(port_id=port['id']) [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. [ 690.744069] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] [ 690.744069] env[62109]: INFO nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Terminating instance [ 690.745627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquiring lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.861683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.964964] env[62109]: DEBUG nova.network.neutron [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.042215] env[62109]: DEBUG nova.network.neutron [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.042903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] Expecting reply to msg 4d1a8a8aed17449da7a88d458e73ed4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 691.052587] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d1a8a8aed17449da7a88d458e73ed4a [ 691.167126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.167673] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 691.169569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 8f791dd5d40242fc841a4c7e826f88be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 691.171840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.644s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.174430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 174b0610e2864c3ba1e5e1bb37a9bc3c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 691.208450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f791dd5d40242fc841a4c7e826f88be [ 691.214015] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174b0610e2864c3ba1e5e1bb37a9bc3c [ 691.548144] env[62109]: DEBUG oslo_concurrency.lockutils [req-2220b870-c568-4a30-8f98-6456a90e07f6 req-cb9d5043-3791-4f04-9169-9e1e262668ef service nova] Releasing lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.548144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquired lock 
"refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.548144] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.548144] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg fd6ef83392474fa38ad83f43ebd04e27 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 691.553295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd6ef83392474fa38ad83f43ebd04e27 [ 691.681495] env[62109]: DEBUG nova.compute.utils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.682112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 4f37b96852274efe82aa6c7fab621d2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 691.683287] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 691.683364] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 691.693014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f37b96852274efe82aa6c7fab621d2a [ 691.897427] env[62109]: DEBUG nova.policy [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23f9946393284244aca0c29201c37fa4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21cdf8675fb347c2874d912dcb8ac002', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 692.061917] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.178419] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110f947e-33ff-45f8-b2c8-eaa0223d2ea7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.187571] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 692.189490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 33d26db894b94fe889489e2842e9cda6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.192056] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad94a4e-68a7-460e-b6f5-7b74fff18b9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.238629] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.239203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 5f33b3f321f544ff9a49fa8b1d6907b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.241111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f591ab40-c647-44dc-8a3e-569f36133246 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.244116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33d26db894b94fe889489e2842e9cda6 [ 692.249896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f33b3f321f544ff9a49fa8b1d6907b8 [ 692.252759] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9f3a55-0391-40dc-954e-8115e6310bf1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.271308] env[62109]: DEBUG nova.compute.provider_tree [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.271814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg aa848f1feb1a4c07abef52baa351a840 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.280408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa848f1feb1a4c07abef52baa351a840 [ 692.515060] env[62109]: DEBUG nova.compute.manager [req-af6a907b-6f6b-4519-83e1-5c5559b8c82f req-c7c1b69f-95f6-4d94-aadd-1842b84e9fcb service nova] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Received event network-vif-deleted-b1bc0a4e-9fde-485b-9637-ec9f81514cd0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 692.700083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 
tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 325685a7dc4f483186aa1de26f54ed51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.701753] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Successfully created port: 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.738756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 325685a7dc4f483186aa1de26f54ed51 [ 692.741811] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Releasing lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.742446] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 692.742446] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 692.742900] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc789add-0428-4e9d-bcf4-16461e0eed83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.754228] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec28abe-6ae9-4048-8b8b-2211a5fb135b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.776961] env[62109]: DEBUG nova.scheduler.client.report [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.779413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 29ac3c4afdee4aeda1a1471d265dca9b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.780541] env[62109]: WARNING nova.virt.vmwareapi.vmops 
[None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 11a6eaa1-0d35-49cf-9341-b74129cf087b could not be found. [ 692.780736] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 692.780938] env[62109]: INFO nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 692.781178] env[62109]: DEBUG oslo.service.loopingcall [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.781878] env[62109]: DEBUG nova.compute.manager [-] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 692.781878] env[62109]: DEBUG nova.network.neutron [-] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 692.815144] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29ac3c4afdee4aeda1a1471d265dca9b [ 692.982378] env[62109]: DEBUG nova.network.neutron [-] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.982867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b4bc9533ced844259825462d4596b99f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 692.992423] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4bc9533ced844259825462d4596b99f [ 693.207011] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 693.236092] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 693.236341] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 693.236458] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.236686] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 693.237467] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.237467] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 693.237467] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 693.237467] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 693.237467] 
env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 693.237684] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 693.237717] env[62109]: DEBUG nova.virt.hardware [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 693.238786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100b8b42-c22a-4949-ae57-83036278033a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.246839] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5166c5-28cd-4ba3-b20a-a54dc3203fd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.283023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.112s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.283836] env[62109]: ERROR nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. 
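Note on the failures recorded above: the PortBindingFailed errors for ports b1bc0a4e-9fde-485b-9637-ec9f81514cd0 and d1298831-ff8d-4c39-806b-3bb7c2472352, and the traceback that follows, all end in nova.network.neutron._ensure_no_port_binding_failure. A minimal sketch of that style of check, assuming a Neutron-style port dict that carries the standard binding:vif_type attribute; the helper below is illustrative and is not copied from Nova's source:

    # Illustrative sketch only; assumes a port dict with 'id' and
    # 'binding:vif_type' keys, as returned by the Neutron API.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port that no mechanism driver could bind with the
        # special vif_type value 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    ensure_no_port_binding_failure(
        {'id': 'b1bc0a4e-9fde-485b-9637-ec9f81514cd0',
         'binding:vif_type': 'binding_failed'})   # raises PortBindingFailed

In the log this check fires while the port update is still being processed, which is why the compute manager reports the build failure rather than Neutron itself.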
[ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Traceback (most recent call last): [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.driver.spawn(context, instance, image_meta, [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] vm_ref = self.build_virtual_machine(instance, [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.283836] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] for vif in network_info: [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return self._sync_wrapper(fn, *args, **kwargs) [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.wait() [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self[:] = self._gt.wait() [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return self._exit_event.wait() [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] current.throw(*self._exc) [ 693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
693.284209] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] result = function(*args, **kwargs) [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] return func(*args, **kwargs) [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise e [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] nwinfo = self.network_api.allocate_for_instance( [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] created_port_ids = self._update_ports_for_instance( [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] with excutils.save_and_reraise_exception(): [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] self.force_reraise() [ 693.284588] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise self.value [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] updated_port = self._update_port( [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] _ensure_no_port_binding_failure(port) [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] raise exception.PortBindingFailed(port_id=port['id']) [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] nova.exception.PortBindingFailed: Binding failed for 
port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. [ 693.284939] env[62109]: ERROR nova.compute.manager [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] [ 693.284939] env[62109]: DEBUG nova.compute.utils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 693.285885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.037s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.287616] env[62109]: INFO nova.compute.claims [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.291899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0e3ba5cce7d8493ba59ea5004666c4cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.293244] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Build of instance 535045d9-108e-4e88-82f0-9da98f2f55a6 was re-scheduled: Binding failed for port d1298831-ff8d-4c39-806b-3bb7c2472352, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 693.293713] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 693.293944] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.294086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.294238] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 693.294617] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 35356242c27242f6a917de1ff944e57b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.302768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35356242c27242f6a917de1ff944e57b [ 693.341811] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e3ba5cce7d8493ba59ea5004666c4cd [ 693.485434] env[62109]: DEBUG nova.network.neutron [-] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.485890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 276c0f9d5ac843968f3fd5a90444394f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.494779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 276c0f9d5ac843968f3fd5a90444394f [ 693.800897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg cd1a82d8d10b4cde829b7564f3190d8d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.808644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd1a82d8d10b4cde829b7564f3190d8d [ 693.819619] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.912697] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.912697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 64bd9af8d0e9460d8a06b431b7842a1e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.921262] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64bd9af8d0e9460d8a06b431b7842a1e [ 693.952774] env[62109]: ERROR nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. [ 693.952774] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.952774] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 693.952774] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 693.952774] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.952774] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.952774] env[62109]: ERROR nova.compute.manager raise self.value [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 693.952774] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 693.952774] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.952774] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 693.953246] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.953246] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 693.953246] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. 
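The traceback just above is raised inside _allocate_network_async, which runs in an eventlet greenthread; the entries that follow show the same exception resurfacing later, when the spawn path first iterates network_info. A small standalone eventlet sketch of that deferred-failure behaviour, with a made-up allocate_network stand-in (this is an analogy, not Nova's NetworkInfoAsync implementation):

    # Standalone sketch, assuming eventlet is installed.
    import eventlet

    def allocate_network():
        # stand-in for the background allocation; pretend binding failed
        raise RuntimeError("Binding failed for port <port-id>")

    gt = eventlet.spawn(allocate_network)   # background allocation starts here

    # The build keeps going; nothing fails yet.  The exception is only
    # re-raised when the result is consumed, which in the log is the driver
    # iterating network_info inside get_vif_info().
    try:
        gt.wait()
    except RuntimeError as exc:
        print("failure surfaces at consumption time:", exc)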
[ 693.953246] env[62109]: ERROR nova.compute.manager [ 693.953246] env[62109]: Traceback (most recent call last): [ 693.953246] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 693.953246] env[62109]: listener.cb(fileno) [ 693.953246] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.953246] env[62109]: result = function(*args, **kwargs) [ 693.953246] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 693.953246] env[62109]: return func(*args, **kwargs) [ 693.953246] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.953246] env[62109]: raise e [ 693.953246] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.953246] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 693.953246] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 693.953246] env[62109]: created_port_ids = self._update_ports_for_instance( [ 693.953246] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 693.953246] env[62109]: with excutils.save_and_reraise_exception(): [ 693.953246] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.953246] env[62109]: self.force_reraise() [ 693.953246] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.953246] env[62109]: raise self.value [ 693.953246] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 693.953246] env[62109]: updated_port = self._update_port( [ 693.953246] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.953246] env[62109]: _ensure_no_port_binding_failure(port) [ 693.953246] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.953246] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 693.954058] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. [ 693.954058] env[62109]: Removing descriptor: 16 [ 693.954058] env[62109]: ERROR nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. 
[ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Traceback (most recent call last): [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] yield resources [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.driver.spawn(context, instance, image_meta, [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.954058] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] vm_ref = self.build_virtual_machine(instance, [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] for vif in network_info: [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self._sync_wrapper(fn, *args, **kwargs) [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.wait() [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self[:] = self._gt.wait() [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self._exit_event.wait() [ 693.954406] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.954788] env[62109]: ERROR 
nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] result = hub.switch() [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self.greenlet.switch() [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] result = function(*args, **kwargs) [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return func(*args, **kwargs) [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise e [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] nwinfo = self.network_api.allocate_for_instance( [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 693.954788] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] created_port_ids = self._update_ports_for_instance( [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] with excutils.save_and_reraise_exception(): [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.force_reraise() [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise self.value [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] updated_port = self._update_port( [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.955153] 
env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] _ensure_no_port_binding_failure(port) [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.955153] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise exception.PortBindingFailed(port_id=port['id']) [ 693.955525] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. [ 693.955525] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] [ 693.955525] env[62109]: INFO nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Terminating instance [ 693.962663] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.962663] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.962663] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 693.962663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 5c066a67b6b844a4b6b59f06a98f9280 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 693.963829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c066a67b6b844a4b6b59f06a98f9280 [ 693.989601] env[62109]: INFO nova.compute.manager [-] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Took 1.21 seconds to deallocate network for instance. 
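Both failed builds above follow the same teardown shape: the vmwareapi driver's destroy finds no backend VM (InstanceNotFound is logged as a warning and tolerated), the network is deallocated anyway, and the resource claim is aborted under the compute_resources lock. A compressed sketch of that pattern, using hypothetical helper callables (find_vm, power_off_and_unregister, deallocate_network, abort_claim) that only mirror the shape of the log:

    # Hypothetical helpers; the flow mirrors the log, not Nova's exact code.
    class InstanceNotFound(Exception):
        pass

    def cleanup_failed_build(find_vm, power_off_and_unregister,
                             deallocate_network, abort_claim):
        try:
            vm_ref = find_vm()                  # e.g. SearchIndex.FindAllByUuid
        except InstanceNotFound:
            vm_ref = None                       # "Instance does not exist on backend"
        if vm_ref is not None:
            power_off_and_unregister(vm_ref)    # only if a VM was actually created
        deallocate_network()                    # release Neutron ports regardless
        abort_claim()                           # return CPU/RAM/disk to the tracker

The point of ordering the steps this way is that a build can fail before the VM exists, so the hypervisor-side miss must not block the Neutron and resource-tracker cleanup that follows.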
[ 693.990884] env[62109]: DEBUG nova.compute.claims [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 693.990884] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.211983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "afc5587e-7fd5-4b07-aff8-98ef8358985f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.212238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "afc5587e-7fd5-4b07-aff8-98ef8358985f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.413790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-535045d9-108e-4e88-82f0-9da98f2f55a6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.414197] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 694.414785] env[62109]: DEBUG nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.415116] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 694.440018] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 694.440657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 94082b71db484debbd812d7ba21bdd1f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 694.451817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94082b71db484debbd812d7ba21bdd1f [ 694.480670] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 694.542904] env[62109]: DEBUG nova.compute.manager [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Received event network-changed-2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 694.543103] env[62109]: DEBUG nova.compute.manager [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Refreshing instance network info cache due to event network-changed-2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 694.543287] env[62109]: DEBUG oslo_concurrency.lockutils [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] Acquiring lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.586231] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.586792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg fe6a5715de114a8e8c2722ec1993cc46 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 694.595759] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe6a5715de114a8e8c2722ec1993cc46 [ 694.713850] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33223b8-216b-4192-bc8f-fd7aa61fff11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.721626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2edf34e-b707-4e28-8162-5cd044aa0b47 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.750705] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a628bde-1f9a-4475-9c03-92cef5a861fa 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.758466] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25091474-c0ec-4509-b4ea-d5da03bfd185 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.772304] env[62109]: DEBUG nova.compute.provider_tree [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.772830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0c9a1b9bcda440aa9cae0068f048bfd9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 694.779547] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c9a1b9bcda440aa9cae0068f048bfd9 [ 694.944875] env[62109]: DEBUG nova.network.neutron [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.945301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 67e4bd3089a94b17b43182b713e7a1c8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 694.953654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67e4bd3089a94b17b43182b713e7a1c8 [ 695.089643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.090130] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 695.090349] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 695.090657] env[62109]: DEBUG oslo_concurrency.lockutils [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] Acquired lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.090893] env[62109]: DEBUG nova.network.neutron [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Refreshing network info cache for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 695.091251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] Expecting reply to msg 72c56a043a7d46bb8a8b263cbab65ffe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.092058] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-493b2e45-efec-47e8-91ac-b879c18c2608 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.101251] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce799c34-6e46-48ec-8f01-ad83218ee6c5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.111692] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72c56a043a7d46bb8a8b263cbab65ffe [ 695.123230] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a4e1dcc-610f-4037-94e9-c9815c12ed1d could not be found. [ 695.123463] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 695.123637] env[62109]: INFO nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 695.123871] env[62109]: DEBUG oslo.service.loopingcall [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.124099] env[62109]: DEBUG nova.compute.manager [-] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 695.124186] env[62109]: DEBUG nova.network.neutron [-] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 695.173106] env[62109]: DEBUG nova.network.neutron [-] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 695.173657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d6171a56013246889075ce8fe0abcdb0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.185018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6171a56013246889075ce8fe0abcdb0 [ 695.275271] env[62109]: DEBUG nova.scheduler.client.report [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.277807] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 68da945ad3cb4053b85d16af270b465b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.288394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68da945ad3cb4053b85d16af270b465b [ 695.448379] env[62109]: INFO nova.compute.manager [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 535045d9-108e-4e88-82f0-9da98f2f55a6] Took 1.03 seconds to deallocate network for instance. [ 695.450295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 3a4b9bcaa08647bd9e42d98baf50f6f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.500141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a4b9bcaa08647bd9e42d98baf50f6f3 [ 695.610546] env[62109]: DEBUG nova.network.neutron [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 695.675799] env[62109]: DEBUG nova.network.neutron [-] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.676362] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 86f0db3f50344531afaa4ee1d0a398a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.684502] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f0db3f50344531afaa4ee1d0a398a9 [ 695.699345] env[62109]: DEBUG nova.network.neutron [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.699877] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] Expecting reply to msg 64dac180c2ae4db5bf54c37fe9b2c191 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.708577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64dac180c2ae4db5bf54c37fe9b2c191 [ 695.780866] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.781372] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.783019] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 6bad4c7da8644ebdba79a0a02cdaa72e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.785385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.764s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.786512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 284bbf4c5b674c1e9bcc363584a295e4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 695.813941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bad4c7da8644ebdba79a0a02cdaa72e [ 695.821002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 284bbf4c5b674c1e9bcc363584a295e4 [ 695.956782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7ba11c468c5e43f5816e1ff340f44f62 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 696.003517] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ba11c468c5e43f5816e1ff340f44f62 [ 696.179300] env[62109]: INFO nova.compute.manager [-] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Took 1.05 seconds to deallocate network for instance. 
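The inventory reported above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with reserved=512, DISK_GB total=400) is what bounds how much the scheduler may place on this node. A minimal illustrative sketch of that sizing, assuming the usual (total - reserved) * allocation_ratio rule; the helper name `schedulable` is made up for the example and is not Nova or placement source code:

    # Illustrative sketch only (not Nova source): derive schedulable capacity
    # from the inventory values logged above, assuming the
    # (total - reserved) * allocation_ratio sizing rule.
    def schedulable(total, reserved, allocation_ratio):
        return int((total - reserved) * allocation_ratio)

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        print(rc, schedulable(**inv))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400

With the values logged above this works out to 192 schedulable VCPUs, 196078 MB of RAM, and 400 GB of disk, which is the capacity the repeated "Inventory has not changed" checks leave unchanged between claims.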
[ 696.181860] env[62109]: DEBUG nova.compute.claims [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 696.182051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.202628] env[62109]: DEBUG oslo_concurrency.lockutils [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] Releasing lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.202628] env[62109]: DEBUG nova.compute.manager [req-325fe01b-cd6a-4ea2-9d39-00857051cc52 req-d6fe2019-e3dd-427e-944b-96eeb390d73a service nova] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Received event network-vif-deleted-2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 696.286307] env[62109]: DEBUG nova.compute.utils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.286984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg d02734c3fa9c4e4fa968ba647bea391f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 696.287965] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.288165] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.301825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d02734c3fa9c4e4fa968ba647bea391f [ 696.383454] env[62109]: DEBUG nova.policy [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 696.484723] env[62109]: INFO nova.scheduler.client.report [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Deleted allocations for instance 535045d9-108e-4e88-82f0-9da98f2f55a6 [ 696.490965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 14ee7432e05c41cfa7de05f05c329a37 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 696.507104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14ee7432e05c41cfa7de05f05c329a37 [ 696.793654] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 696.795439] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 530fbe2aa29846289114323677d6f6a3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 696.844157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 530fbe2aa29846289114323677d6f6a3 [ 696.874221] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Successfully created port: 0dbb80c9-2197-47a4-942f-1135d1c701d1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.889331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed372c3-3b9a-4240-adf8-eb13b8c6d276 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.897764] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c8efa0-1167-4b17-a817-695b5fba6300 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.928730] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3001fee4-6e4c-4d41-b9fd-d82a7e085f36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.936048] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7b670d-8ae0-4212-af18-e8806def8645 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.949337] env[62109]: DEBUG nova.compute.provider_tree [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.949917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg ce41eadf9a0e4710a70d824887aac8f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 696.956905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce41eadf9a0e4710a70d824887aac8f9 [ 696.993147] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6a435c00-b673-481f-90a8-c507ab0df46a tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "535045d9-108e-4e88-82f0-9da98f2f55a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.449s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.993818] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg e6af8050834c41178f55f23beb7f77e9 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 697.005639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6af8050834c41178f55f23beb7f77e9 [ 697.300839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 790c99d8743b47149f53a12a65e2229b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.340438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 790c99d8743b47149f53a12a65e2229b [ 697.452503] env[62109]: DEBUG nova.scheduler.client.report [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.454907] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 84a7e247d50f4664853e89dc006b1b04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.466189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84a7e247d50f4664853e89dc006b1b04 [ 697.497458] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 697.499465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg a5f2e51e559f42a69fbbf2ed52d7863e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.551378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5f2e51e559f42a69fbbf2ed52d7863e [ 697.714464] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquiring lock "47b83dbe-d7d8-4875-bb79-95a8fecf4028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.714683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "47b83dbe-d7d8-4875-bb79-95a8fecf4028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.803865] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.835904] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.836197] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.836294] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.836465] env[62109]: DEBUG nova.virt.hardware [None 
req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.836598] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.836733] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.836927] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.837074] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.837228] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.837379] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.837540] env[62109]: DEBUG nova.virt.hardware [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.838517] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d40dce-405b-4c46-ae04-4684e6250ca8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.846685] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020edd77-b630-41f4-8a31-381a298b0ec3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.958030] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.173s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.958838] env[62109]: 
ERROR nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Traceback (most recent call last): [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.driver.spawn(context, instance, image_meta, [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] vm_ref = self.build_virtual_machine(instance, [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.958838] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] for vif in network_info: [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return self._sync_wrapper(fn, *args, **kwargs) [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.wait() [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self[:] = self._gt.wait() [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return self._exit_event.wait() [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] current.throw(*self._exc) [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.959267] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] result = function(*args, **kwargs) [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] return func(*args, **kwargs) [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise e [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] nwinfo = self.network_api.allocate_for_instance( [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] created_port_ids = self._update_ports_for_instance( [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] with excutils.save_and_reraise_exception(): [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] self.force_reraise() [ 697.959612] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise self.value [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] updated_port = self._update_port( [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] _ensure_no_port_binding_failure(port) [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 
83a25ff9-cc7d-4917-95cc-e621884bcee8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] raise exception.PortBindingFailed(port_id=port['id']) [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] nova.exception.PortBindingFailed: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. [ 697.959954] env[62109]: ERROR nova.compute.manager [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] [ 697.959954] env[62109]: DEBUG nova.compute.utils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.962019] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Build of instance 83a25ff9-cc7d-4917-95cc-e621884bcee8 was re-scheduled: Binding failed for port 033d9bed-a864-43c3-8c4b-838811ea8db1, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.962455] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.962787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquiring lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.963345] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Acquired lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.963530] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.963956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 3d954443deb54af79d8db16145fbb0dd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.971993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d954443deb54af79d8db16145fbb0dd [ 697.972878] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.141s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.975747] env[62109]: INFO nova.compute.claims [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.977746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 7fcdc1e449e44eb1a52d8aa123f2bc11 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.982371] env[62109]: DEBUG nova.compute.manager [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Received event network-changed-0dbb80c9-2197-47a4-942f-1135d1c701d1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 697.982616] env[62109]: DEBUG nova.compute.manager [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Refreshing instance network info cache due to event network-changed-0dbb80c9-2197-47a4-942f-1135d1c701d1. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 697.982924] env[62109]: DEBUG oslo_concurrency.lockutils [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] Acquiring lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.984327] env[62109]: DEBUG oslo_concurrency.lockutils [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] Acquired lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.984508] env[62109]: DEBUG nova.network.neutron [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Refreshing network info cache for port 0dbb80c9-2197-47a4-942f-1135d1c701d1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.984888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] Expecting reply to msg 0a9176560ad94b468c9c9570801384e4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 697.991718] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a9176560ad94b468c9c9570801384e4 [ 698.015400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fcdc1e449e44eb1a52d8aa123f2bc11 [ 698.029978] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.040380] env[62109]: DEBUG nova.network.neutron [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.116196] env[62109]: DEBUG nova.network.neutron [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.116901] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] Expecting reply to msg 8f7eb0afa8c34f48bc4c06a7736f36a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 698.127801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f7eb0afa8c34f48bc4c06a7736f36a1 [ 698.168233] env[62109]: ERROR nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. [ 698.168233] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.168233] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 698.168233] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 698.168233] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.168233] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.168233] env[62109]: ERROR nova.compute.manager raise self.value [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 698.168233] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 698.168233] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.168233] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 698.168720] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.168720] env[62109]: ERROR nova.compute.manager 
raise exception.PortBindingFailed(port_id=port['id']) [ 698.168720] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. [ 698.168720] env[62109]: ERROR nova.compute.manager [ 698.169318] env[62109]: Traceback (most recent call last): [ 698.169444] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 698.169444] env[62109]: listener.cb(fileno) [ 698.169555] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.169555] env[62109]: result = function(*args, **kwargs) [ 698.169619] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 698.169619] env[62109]: return func(*args, **kwargs) [ 698.169681] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.169681] env[62109]: raise e [ 698.169751] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.169751] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 698.169814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 698.169814] env[62109]: created_port_ids = self._update_ports_for_instance( [ 698.169879] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 698.169879] env[62109]: with excutils.save_and_reraise_exception(): [ 698.169941] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.169941] env[62109]: self.force_reraise() [ 698.170085] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.170085] env[62109]: raise self.value [ 698.170152] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 698.170152] env[62109]: updated_port = self._update_port( [ 698.170244] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.170244] env[62109]: _ensure_no_port_binding_failure(port) [ 698.170323] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.170323] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 698.170400] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. [ 698.170446] env[62109]: Removing descriptor: 16 [ 698.171266] env[62109]: ERROR nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. 
[ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Traceback (most recent call last): [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] yield resources [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.driver.spawn(context, instance, image_meta, [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self._vmops.spawn(context, instance, image_meta, injected_files, [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] vm_ref = self.build_virtual_machine(instance, [ 698.171266] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] vif_infos = vmwarevif.get_vif_info(self._session, [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] for vif in network_info: [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self._sync_wrapper(fn, *args, **kwargs) [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.wait() [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self[:] = self._gt.wait() [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self._exit_event.wait() [ 698.171624] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 698.171624] env[62109]: ERROR 
nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] result = hub.switch() [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self.greenlet.switch() [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] result = function(*args, **kwargs) [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return func(*args, **kwargs) [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise e [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] nwinfo = self.network_api.allocate_for_instance( [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] created_port_ids = self._update_ports_for_instance( [ 698.171976] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] with excutils.save_and_reraise_exception(): [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.force_reraise() [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise self.value [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] updated_port = self._update_port( [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.172333] 
env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] _ensure_no_port_binding_failure(port) [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise exception.PortBindingFailed(port_id=port['id']) [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. [ 698.172333] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] [ 698.173800] env[62109]: INFO nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Terminating instance [ 698.178669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.481599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 98517e16d6e94687bb1c1ea645b7de6c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 698.486873] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.496070] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98517e16d6e94687bb1c1ea645b7de6c [ 698.608904] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.608904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg a6944a5051324fe4ae7e739d3e16b276 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 698.616275] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6944a5051324fe4ae7e739d3e16b276 [ 698.619651] env[62109]: DEBUG oslo_concurrency.lockutils [req-14e5bcac-d4cd-41cd-97d4-0818a3fa67fa req-cd142b42-24f1-4a0f-8989-c21373fe94ef service nova] Releasing lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.620046] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.620343] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 698.620817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 39cee582a01d4d5a8e123e9194c3ce3e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 698.627191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39cee582a01d4d5a8e123e9194c3ce3e [ 699.051230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "aa1afca5-8194-4a9d-bcd0-e3e91c15338c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.051465] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "aa1afca5-8194-4a9d-bcd0-e3e91c15338c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.110279] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Releasing lock "refresh_cache-83a25ff9-cc7d-4917-95cc-e621884bcee8" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.110553] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 699.110729] env[62109]: DEBUG nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.110938] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 699.141915] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.142484] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg a800ba424ce0448f9c0583a798afd6c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 699.149748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a800ba424ce0448f9c0583a798afd6c4 [ 699.155450] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.248370] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.248828] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg e9cc6b4d9dff4e5abc37395e11b5b10f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 699.259019] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9cc6b4d9dff4e5abc37395e11b5b10f [ 699.435408] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eed5ed-7f00-4251-89fa-0433a6227c14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.443196] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed332aa-74f0-4786-9349-9d6a95f7c994 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.473684] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb24a71-d55c-4fb5-864f-c5abcca7e55f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.500642] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc9aed6-c0bc-4c5e-95cc-a3436db63d85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.507591] env[62109]: DEBUG nova.compute.provider_tree [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.508330] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg b354836726964cdeab6c846b4c1cad1d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 699.515230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b354836726964cdeab6c846b4c1cad1d [ 699.644785] env[62109]: DEBUG nova.network.neutron [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.645348] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 46fce06ffe1d40f4b16fda4fc080687d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 699.653713] 
env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46fce06ffe1d40f4b16fda4fc080687d [ 699.753162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.753632] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.753828] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 699.754130] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5496e9fd-7036-47ff-9235-5c10f35a5c43 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.766670] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574c92c4-8e0e-4b1f-afdd-d0ea74eeb32f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.791077] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0e018d70-d6dd-4f79-bb03-14b815645562 could not be found. [ 699.797837] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 699.797837] env[62109]: INFO nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Took 0.04 seconds to destroy the instance on the hypervisor. [ 699.797837] env[62109]: DEBUG oslo.service.loopingcall [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.797837] env[62109]: DEBUG nova.compute.manager [-] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.797837] env[62109]: DEBUG nova.network.neutron [-] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 699.822505] env[62109]: DEBUG nova.network.neutron [-] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.823038] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 207302dc437844e59ca0171949a02825 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 699.829266] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 207302dc437844e59ca0171949a02825 [ 700.000413] env[62109]: DEBUG nova.compute.manager [req-aecdd454-b510-4772-a075-188fe09c5125 req-58d0e916-7dc6-4d53-95a2-c45bafc67719 service nova] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Received event network-vif-deleted-0dbb80c9-2197-47a4-942f-1135d1c701d1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 700.016516] env[62109]: DEBUG nova.scheduler.client.report [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 700.018942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg f29e14a2816748e9926ce53eed5679d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.031850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f29e14a2816748e9926ce53eed5679d7 [ 700.147233] env[62109]: INFO nova.compute.manager [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] [instance: 83a25ff9-cc7d-4917-95cc-e621884bcee8] Took 1.04 seconds to deallocate network for instance. 
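The PortBindingFailed failure logged above for port 0dbb80c9-2197-47a4-942f-1135d1c701d1 (and again below for ports b2097d88-fbe6-444e-861d-6f21a30fe2ba and 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c) is raised from the same place in every traceback: nova/network/neutron.py, where _update_port() hands the updated port to _ensure_no_port_binding_failure(port), which raises exception.PortBindingFailed(port_id=port['id']) when Neutron reports the binding as failed. The exception then unwinds through _allocate_network_async and _build_and_run_instance, producing the terminate/deallocate sequence seen here. Below is a minimal, self-contained sketch of that guard; the 'binding:vif_type' == 'binding_failed' test is an assumption about how the failure is detected and is not itself visible in this log, only the raise is.

# Hedged sketch of the guard behind the PortBindingFailed errors in this log.
# Assumption: a port whose 'binding:vif_type' comes back as 'binding_failed'
# counts as a failed binding; the tracebacks above only confirm the raise.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

def _ensure_no_port_binding_failure(port):
    # Mirrors the check named in the tracebacks above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example with the port id from the traceback above:
port = {'id': '0dbb80c9-2197-47a4-942f-1135d1c701d1',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

Once this exception reaches _do_build_and_run_instance, the instance is cleaned up and its resource claim aborted rather than retried in place, which matches the abort_instance_claim lock messages and the "was re-scheduled" entries further down in this log.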
[ 700.149104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 1095069f5258457eab2bfbf7e7fc3d26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.188033] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1095069f5258457eab2bfbf7e7fc3d26 [ 700.324655] env[62109]: DEBUG nova.network.neutron [-] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.325112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb3e6cd725ef492ab7107c51e94bb2a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.333404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb3e6cd725ef492ab7107c51e94bb2a4 [ 700.521258] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.521787] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 700.523409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 1801d00535094cdb92075f6ae6b584b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.524447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.556s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.526118] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 27a026f176b54a07951c481c9e6d7cf4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.562886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1801d00535094cdb92075f6ae6b584b0 [ 700.572264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27a026f176b54a07951c481c9e6d7cf4 [ 700.653945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg b5f6c3d51f4e433893b66a11d03dcc90 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 700.683969] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5f6c3d51f4e433893b66a11d03dcc90 [ 700.827821] env[62109]: INFO nova.compute.manager [-] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Took 1.03 seconds to deallocate network for instance. [ 700.830374] env[62109]: DEBUG nova.compute.claims [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 700.830546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.029295] env[62109]: DEBUG nova.compute.utils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.029944] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg a435968e906340e5ada037dceba437b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 701.034171] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 701.034345] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 701.045030] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a435968e906340e5ada037dceba437b3 [ 701.093426] env[62109]: DEBUG nova.policy [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9b5da32e11744ada3e612ea411872f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03e845d8aa4a47b6954ddf78c83c41aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 701.176041] env[62109]: INFO nova.scheduler.client.report [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Deleted allocations for instance 83a25ff9-cc7d-4917-95cc-e621884bcee8 [ 701.182155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Expecting reply to msg 199890f65b014c3ca907c889fe038390 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 701.198845] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 199890f65b014c3ca907c889fe038390 [ 701.400390] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Successfully created port: b2097d88-fbe6-444e-861d-6f21a30fe2ba {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.454334] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6512e118-afcd-4845-9558-b0f94034ce7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.463522] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab2bc11-f5c3-41a4-9e8a-4e87afbab486 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.494612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f91cfd-d978-4af6-97e7-e1d3ee2ddaf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.502555] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3f9932-2672-4fc1-95d2-d121dba12d78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.515723] 
env[62109]: DEBUG nova.compute.provider_tree [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.516295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg b67a13a0e4404bd7883d56ccda8ed98b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 701.524596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b67a13a0e4404bd7883d56ccda8ed98b [ 701.535363] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 701.537066] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg c3289efb7b554abc8135a7f69170efcf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 701.567490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3289efb7b554abc8135a7f69170efcf [ 701.684021] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d5bc7237-6f46-40f5-8d1a-1d1435f25dcb tempest-ServerGroupTestJSON-850352494 tempest-ServerGroupTestJSON-850352494-project-member] Lock "83a25ff9-cc7d-4917-95cc-e621884bcee8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.235s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.684614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg cd7bbcf41bf244c6b9f460ce82d3e9b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 701.697300] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd7bbcf41bf244c6b9f460ce82d3e9b6 [ 702.019179] env[62109]: DEBUG nova.scheduler.client.report [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.024322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 
tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg afd8778f083848d981489a3d0e99ccd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.033986] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afd8778f083848d981489a3d0e99ccd2 [ 702.041799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 5f5278eeee9f45afb459cbd35dbef387 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.070204] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f5278eeee9f45afb459cbd35dbef387 [ 702.106278] env[62109]: DEBUG nova.compute.manager [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Received event network-changed-b2097d88-fbe6-444e-861d-6f21a30fe2ba {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 702.106498] env[62109]: DEBUG nova.compute.manager [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Refreshing instance network info cache due to event network-changed-b2097d88-fbe6-444e-861d-6f21a30fe2ba. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 702.106738] env[62109]: DEBUG oslo_concurrency.lockutils [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] Acquiring lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.106883] env[62109]: DEBUG oslo_concurrency.lockutils [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] Acquired lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.107065] env[62109]: DEBUG nova.network.neutron [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Refreshing network info cache for port b2097d88-fbe6-444e-861d-6f21a30fe2ba {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 702.107561] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] Expecting reply to msg 21284f5ba2e04b8f98c9be43dc2bb737 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.119360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21284f5ba2e04b8f98c9be43dc2bb737 [ 702.190407] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 702.192435] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg a23e01ed125240e999902e856c8a58f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.230661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a23e01ed125240e999902e856c8a58f0 [ 702.314907] env[62109]: ERROR nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. [ 702.314907] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.314907] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.314907] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.314907] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.314907] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.314907] env[62109]: ERROR nova.compute.manager raise self.value [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.314907] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 702.314907] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.314907] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 702.315455] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.315455] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 702.315455] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. 
[ 702.315455] env[62109]: ERROR nova.compute.manager [ 702.315455] env[62109]: Traceback (most recent call last): [ 702.315455] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 702.315455] env[62109]: listener.cb(fileno) [ 702.315455] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.315455] env[62109]: result = function(*args, **kwargs) [ 702.315455] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.315455] env[62109]: return func(*args, **kwargs) [ 702.315455] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.315455] env[62109]: raise e [ 702.315455] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.315455] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 702.315455] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.315455] env[62109]: created_port_ids = self._update_ports_for_instance( [ 702.315455] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.315455] env[62109]: with excutils.save_and_reraise_exception(): [ 702.315455] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.315455] env[62109]: self.force_reraise() [ 702.315455] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.315455] env[62109]: raise self.value [ 702.315455] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.315455] env[62109]: updated_port = self._update_port( [ 702.315455] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.315455] env[62109]: _ensure_no_port_binding_failure(port) [ 702.315455] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.315455] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 702.316327] env[62109]: nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. [ 702.316327] env[62109]: Removing descriptor: 19 [ 702.525729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.526741] env[62109]: ERROR nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. 
[ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Traceback (most recent call last): [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.driver.spawn(context, instance, image_meta, [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] vm_ref = self.build_virtual_machine(instance, [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.526741] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] for vif in network_info: [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self._sync_wrapper(fn, *args, **kwargs) [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.wait() [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self[:] = self._gt.wait() [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self._exit_event.wait() [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] result = hub.switch() [ 702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
702.527043] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return self.greenlet.switch() [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] result = function(*args, **kwargs) [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] return func(*args, **kwargs) [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise e [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] nwinfo = self.network_api.allocate_for_instance( [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] created_port_ids = self._update_ports_for_instance( [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] with excutils.save_and_reraise_exception(): [ 702.527362] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] self.force_reraise() [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise self.value [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] updated_port = self._update_port( [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] _ensure_no_port_binding_failure(port) [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] raise exception.PortBindingFailed(port_id=port['id']) [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] nova.exception.PortBindingFailed: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. [ 702.527682] env[62109]: ERROR nova.compute.manager [instance: b537150e-9136-4fa4-b092-4f4995b918b7] [ 702.528126] env[62109]: DEBUG nova.compute.utils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 702.528816] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.298s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.530740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg be2f225b64044c1c933ac2e69c7a609e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.531922] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Build of instance b537150e-9136-4fa4-b092-4f4995b918b7 was re-scheduled: Binding failed for port 9b5b0d54-31e6-49e7-a134-cfc1557f1a7c, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 702.532390] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 702.532627] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquiring lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.532774] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Acquired lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.532930] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.533293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 0874685a92b44e0289117e6194842bb1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.544177] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 702.563654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0874685a92b44e0289117e6194842bb1 [ 702.572059] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.572311] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.572463] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.572637] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.572913] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.572986] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.573159] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 702.573315] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 
tempest-ServerRescueNegativeTestJSON-663683599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.573476] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.573635] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.573805] env[62109]: DEBUG nova.virt.hardware [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.574848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be2f225b64044c1c933ac2e69c7a609e [ 702.575736] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd64729a-04dd-4729-a2e6-8c65ad500a4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.584333] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0524a71e-1fe5-42c1-8af5-e0657642e1f2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.599242] env[62109]: ERROR nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. 
[ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Traceback (most recent call last): [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] yield resources [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.driver.spawn(context, instance, image_meta, [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] vm_ref = self.build_virtual_machine(instance, [ 702.599242] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] for vif in network_info: [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return self._sync_wrapper(fn, *args, **kwargs) [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.wait() [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self[:] = self._gt.wait() [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return self._exit_event.wait() [ 702.599658] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 702.599658] env[62109]: ERROR 
nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] current.throw(*self._exc) [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] result = function(*args, **kwargs) [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return func(*args, **kwargs) [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise e [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] nwinfo = self.network_api.allocate_for_instance( [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] created_port_ids = self._update_ports_for_instance( [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] with excutils.save_and_reraise_exception(): [ 702.600051] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.force_reraise() [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise self.value [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] updated_port = self._update_port( [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] _ensure_no_port_binding_failure(port) [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise exception.PortBindingFailed(port_id=port['id']) [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. [ 702.600417] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] [ 702.600417] env[62109]: INFO nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Terminating instance [ 702.601364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.631893] env[62109]: DEBUG nova.network.neutron [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.712697] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.724271] env[62109]: DEBUG nova.network.neutron [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.724779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] Expecting reply to msg 17983586aab94abf881f062171064418 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 702.733650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17983586aab94abf881f062171064418 [ 703.054481] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.154141] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.154141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 24f5f76681204b59aad9896960e17967 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.160650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24f5f76681204b59aad9896960e17967 [ 703.226413] env[62109]: DEBUG oslo_concurrency.lockutils [req-23a73ac3-f811-4fb9-9509-d93a72888fea req-e54ef9e7-5eb7-4e52-abf7-a748a1c0d03b service nova] Releasing lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.226818] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquired lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.227015] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 703.227456] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 46b33db491dd4b7f84e70067351757a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.234029] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46b33db491dd4b7f84e70067351757a4 [ 703.423029] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0fd388-0403-4821-9e54-563634264a2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.430790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a087a1c9-d832-45f8-9a0d-90404a1b07d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.459410] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a77681b-5858-4259-8ad2-2286263b5d85 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.466291] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a30da9-3a17-4eb9-b6fc-8e127287bcc5 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.479462] env[62109]: DEBUG nova.compute.provider_tree [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.479972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 5e1fad3f8cd34a33b037b73e0cc98d02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.487465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e1fad3f8cd34a33b037b73e0cc98d02 [ 703.654981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Releasing lock "refresh_cache-b537150e-9136-4fa4-b092-4f4995b918b7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.655282] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 703.655367] env[62109]: DEBUG nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.655513] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.671480] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.672108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 92ea8ce6c8bd49b4b572c03bdaf01fcf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.680613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92ea8ce6c8bd49b4b572c03bdaf01fcf [ 703.755991] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.856964] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.857533] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 106050ab669f4255a96a4173342e4a3d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.866034] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 106050ab669f4255a96a4173342e4a3d [ 703.982416] env[62109]: DEBUG nova.scheduler.client.report [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.984988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg dd538c0e7e0d471c933961b1f979d60b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 703.999497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd538c0e7e0d471c933961b1f979d60b [ 704.141360] env[62109]: DEBUG nova.compute.manager [req-73456307-4d62-46b3-906d-917bd25e4dee req-049f769b-75ee-4755-9e58-d7bf918533d6 service nova] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Received event network-vif-deleted-b2097d88-fbe6-444e-861d-6f21a30fe2ba {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 704.175649] env[62109]: DEBUG nova.network.neutron [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 
tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.176475] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 464eab5a64b245c09d51349e3c1cef5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.188172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 464eab5a64b245c09d51349e3c1cef5c [ 704.359433] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Releasing lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.359894] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 704.360105] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 704.360413] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14b8b74f-be3f-480d-b768-c0b4c1f1798e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.369099] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d25de02-5c7d-4024-a0f1-99d68cd04599 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.390085] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9b2ced5-a77c-4bff-b115-ce5c523be630 could not be found. [ 704.390291] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 704.390470] env[62109]: INFO nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Took 0.03 seconds to destroy the instance on the hypervisor. 
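In the records just above, the vmwareapi destroy path hits InstanceNotFound ("Instance does not exist on backend"), logs a warning, and still reports "Instance destroyed" and "Took 0.03 seconds to destroy the instance on the hypervisor": a VM that was never created, because spawn failed at port binding, is simply treated as already gone. A rough sketch of that pattern follows; only the behaviour is taken from the log, and the helper names are hypothetical.

```python
# Rough sketch of the "missing VM counts as destroyed" behaviour visible
# above (WARNING InstanceNotFound, then "Instance destroyed").
# The helper names here are hypothetical, not Nova's actual API.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def destroy_on_backend(instance_uuid):
    # Stand-in for the hypervisor call; the VM was never created because
    # spawn failed at port binding, so the lookup fails.
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def destroy(instance_uuid):
    try:
        destroy_on_backend(instance_uuid)
    except InstanceNotFound as exc:
        # Nothing exists on the backend, so there is nothing to tear down;
        # log it and treat the instance as destroyed.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")


destroy("c9b2ced5-a77c-4bff-b115-ce5c523be630")
```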
[ 704.390701] env[62109]: DEBUG oslo.service.loopingcall [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.390905] env[62109]: DEBUG nova.compute.manager [-] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 704.390998] env[62109]: DEBUG nova.network.neutron [-] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 704.404557] env[62109]: DEBUG nova.network.neutron [-] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.405024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 62493c45ad35472591234fb20155eeef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.412235] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62493c45ad35472591234fb20155eeef [ 704.488271] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.488994] env[62109]: ERROR nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. 
[ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Traceback (most recent call last): [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.driver.spawn(context, instance, image_meta, [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] vm_ref = self.build_virtual_machine(instance, [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.488994] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] for vif in network_info: [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return self._sync_wrapper(fn, *args, **kwargs) [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.wait() [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self[:] = self._gt.wait() [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return self._exit_event.wait() [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] current.throw(*self._exc) [ 704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
704.489306] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] result = function(*args, **kwargs) [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] return func(*args, **kwargs) [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise e [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] nwinfo = self.network_api.allocate_for_instance( [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] created_port_ids = self._update_ports_for_instance( [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] with excutils.save_and_reraise_exception(): [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] self.force_reraise() [ 704.489682] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise self.value [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] updated_port = self._update_port( [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] _ensure_no_port_binding_failure(port) [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] raise exception.PortBindingFailed(port_id=port['id']) [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] nova.exception.PortBindingFailed: Binding failed for 
port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. [ 704.490114] env[62109]: ERROR nova.compute.manager [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] [ 704.490114] env[62109]: DEBUG nova.compute.utils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.490886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.720s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.492789] env[62109]: INFO nova.compute.claims [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.494177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 174d3e02ba7c4bfcb8d23e73764b262c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.495510] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Build of instance fe9756ba-0eb5-41ad-913f-e933f97542cb was re-scheduled: Binding failed for port 8b1944f9-bd34-42ed-a99d-9be9225ffaf1, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 704.495936] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 704.496162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquiring lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.496300] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Acquired lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.496484] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.496792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 5f310a7b7c0447459594273938b1ff22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.509467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f310a7b7c0447459594273938b1ff22 [ 704.529727] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174d3e02ba7c4bfcb8d23e73764b262c [ 704.678907] env[62109]: INFO nova.compute.manager [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] [instance: b537150e-9136-4fa4-b092-4f4995b918b7] Took 1.02 seconds to deallocate network for instance. 
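Both failed builds in this stretch end the same way: _update_port() calls _ensure_no_port_binding_failure(port) (nova/network/neutron.py:294 in the tracebacks), which raises PortBindingFailed for ports b2097d88-... and 8b1944f9-..., after which the instance is torn down or re-scheduled. Below is a minimal sketch of that guard; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions based on the Neutron port API, not values quoted in this log.

```python
# Minimal sketch of the guard both tracebacks above end in
# (_ensure_no_port_binding_failure); the 'binding:vif_type' key and the
# 'binding_failed' sentinel are assumptions, not quoted from this log.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")
        self.port_id = port_id


VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed sentinel value


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise PortBindingFailed if Neutron reports a failed binding."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The failures in the log correspond to ports that came back like this:
port = {'id': 'b2097d88-fbe6-444e-861d-6f21a30fe2ba',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```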
[ 704.680667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 6c067972049c413c92b4b04bf4b1af33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.722835] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c067972049c413c92b4b04bf4b1af33 [ 704.906911] env[62109]: DEBUG nova.network.neutron [-] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.907438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 512552dc5b1a40fe890e98c2e5437d7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 704.915317] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 512552dc5b1a40fe890e98c2e5437d7d [ 705.001681] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 326bbceac7d5468583e7edc108e7aa2c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 705.010978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 326bbceac7d5468583e7edc108e7aa2c [ 705.032021] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.185698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg 6b545c349fe74dcca2df75f65a6165fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 705.219420] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b545c349fe74dcca2df75f65a6165fc [ 705.220675] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.221202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg acf05a45b37d4f35a4dd6fb2e50c9aeb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 705.232352] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acf05a45b37d4f35a4dd6fb2e50c9aeb [ 705.411742] env[62109]: INFO nova.compute.manager [-] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Took 1.02 seconds to deallocate network for instance. 
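The recurring Acquiring lock "compute_resources" / Lock ... acquired ... waited N.NNNs / Lock ... "released" ... held N.NNNs records in this part of the log (for example the instance_claim that waited 15.720s and the abort_instance_claim that held 1.959s) are emitted by oslo.concurrency's lockutils wrapper around the resource tracker's methods. A minimal usage sketch, assuming only that oslo.concurrency is installed; the lock name mirrors the log:

```python
# Minimal lockutils usage sketch; the waited/held DEBUG lines in the log
# come from lockutils' "inner" wrapper, not from Nova's own code.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs while holding the in-process 'compute_resources' lock; lockutils
    # logs how long this call waited for the lock and how long it held it.
    pass


instance_claim()
```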
[ 705.414836] env[62109]: DEBUG nova.compute.claims [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 705.415077] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.706670] env[62109]: INFO nova.scheduler.client.report [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Deleted allocations for instance b537150e-9136-4fa4-b092-4f4995b918b7 [ 705.718613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Expecting reply to msg ca62d0be036a4b24925365d40af39603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 705.722785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Releasing lock "refresh_cache-fe9756ba-0eb5-41ad-913f-e933f97542cb" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.722989] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 705.723158] env[62109]: DEBUG nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.723312] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.738056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca62d0be036a4b24925365d40af39603 [ 705.744550] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.745036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg bb5943082ae6422a98b504f8ff597584 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 705.751640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb5943082ae6422a98b504f8ff597584 [ 705.938084] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6058ff46-9f52-4a21-88cd-ccee17105865 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.945752] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bdc085-4e57-4dae-a944-907b8fb6d970 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.977609] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7446c75f-158f-481c-bb65-2711204246ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.986958] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29cf69f-128d-4b2a-9053-4614d8c3d705 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.003264] env[62109]: DEBUG nova.compute.provider_tree [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.003966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 15720baf224141388d3e70f82e13fe0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.011157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15720baf224141388d3e70f82e13fe0b [ 706.220863] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a3640907-da62-4cf8-89d4-91cd771388ef tempest-ServerDiagnosticsNegativeTest-1465321625 tempest-ServerDiagnosticsNegativeTest-1465321625-project-member] Lock "b537150e-9136-4fa4-b092-4f4995b918b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.590s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.221497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg b210b725c00243b9bfaaadc3478f9d79 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.234985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b210b725c00243b9bfaaadc3478f9d79 [ 706.247292] env[62109]: DEBUG nova.network.neutron [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 
tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.247869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 66b40b98d2a74de0884c11925b4b7835 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.256518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66b40b98d2a74de0884c11925b4b7835 [ 706.506982] env[62109]: DEBUG nova.scheduler.client.report [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.509502] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 4699ed952ee149a8878bf1bfd556adb3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.521643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4699ed952ee149a8878bf1bfd556adb3 [ 706.724211] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 706.726092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 4ce1050242d548859a44cbd2b7fefc06 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.750120] env[62109]: INFO nova.compute.manager [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] [instance: fe9756ba-0eb5-41ad-913f-e933f97542cb] Took 1.03 seconds to deallocate network for instance. 
[ 706.751784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg cb7c547694eb47d3afbe705309f89a7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 706.778495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ce1050242d548859a44cbd2b7fefc06 [ 706.788787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb7c547694eb47d3afbe705309f89a7d [ 707.011749] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.012336] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 707.014034] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 1f3da330d32142959b726c1d560a9d94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.015118] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.154s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.016586] env[62109]: INFO nova.compute.claims [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.018078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 1feed856b4164deda0cea4b294bfec93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.054442] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3da330d32142959b726c1d560a9d94 [ 707.082865] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1feed856b4164deda0cea4b294bfec93 [ 707.249064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.255749] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg 14b94c6936e24124bbad559aee6a6854 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.308728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b94c6936e24124bbad559aee6a6854 [ 707.521315] env[62109]: DEBUG nova.compute.utils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.521979] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 98c735c336ff4bfa97a523a547368772 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.522922] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.523094] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 707.526033] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 94e3e08a440b46f3adc03bf969bfd759 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.533882] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94e3e08a440b46f3adc03bf969bfd759 [ 707.535050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98c735c336ff4bfa97a523a547368772 [ 707.579849] env[62109]: DEBUG nova.policy [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9b5da32e11744ada3e612ea411872f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03e845d8aa4a47b6954ddf78c83c41aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 707.779313] env[62109]: INFO nova.scheduler.client.report [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Deleted allocations for instance fe9756ba-0eb5-41ad-913f-e933f97542cb [ 707.785625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Expecting reply to msg c052d1bf73794566a69c6efc5940c284 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 707.804256] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c052d1bf73794566a69c6efc5940c284 [ 707.966610] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Successfully created port: b9a39990-885d-406a-a2d2-338706184202 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.027508] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 708.029342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 860e2909ec8449a9a07718e83098875b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.073382] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 860e2909ec8449a9a07718e83098875b [ 708.268565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquiring lock "732cf1e3-823d-4769-ad16-f5b492be53d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.269037] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "732cf1e3-823d-4769-ad16-f5b492be53d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.287830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8de69702-3c2b-4bac-bfeb-b8b231ed5eb6 tempest-ImagesOneServerNegativeTestJSON-1955627597 tempest-ImagesOneServerNegativeTestJSON-1955627597-project-member] Lock "fe9756ba-0eb5-41ad-913f-e933f97542cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.122s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.288784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg e74160458568407f8b1782546b9bdee2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.303129] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
e74160458568407f8b1782546b9bdee2 [ 708.531031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeae2cda-4435-47a2-9713-0e5ff7f0731b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.536548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 3f67e9d8f9dd40d0951f6920243bdab6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.543675] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3f6bb3-a79e-4d8a-af3f-d3c0cfa5bba1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.583571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f67e9d8f9dd40d0951f6920243bdab6 [ 708.585225] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cbbd15-a4d9-4d6a-8004-9ffb85f9bca3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.594098] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c266401-2943-4e5e-a3b4-afa492b0e4c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.609616] env[62109]: DEBUG nova.compute.provider_tree [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.610226] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg bf615efc85864503ab42ac72eda570f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.616883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf615efc85864503ab42ac72eda570f5 [ 708.793731] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 708.793731] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c25030853c334b9e8cd17f204ef8cc9c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.830527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c25030853c334b9e8cd17f204ef8cc9c [ 708.901194] env[62109]: DEBUG nova.compute.manager [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Received event network-changed-b9a39990-885d-406a-a2d2-338706184202 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 708.901399] env[62109]: DEBUG nova.compute.manager [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Refreshing instance network info cache due to event network-changed-b9a39990-885d-406a-a2d2-338706184202. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 708.902284] env[62109]: DEBUG oslo_concurrency.lockutils [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] Acquiring lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.902284] env[62109]: DEBUG oslo_concurrency.lockutils [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] Acquired lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.902284] env[62109]: DEBUG nova.network.neutron [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Refreshing network info cache for port b9a39990-885d-406a-a2d2-338706184202 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.902640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] Expecting reply to msg 6cebadc72ff74c2c8e421caccea1c5d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 708.909214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cebadc72ff74c2c8e421caccea1c5d6 [ 708.989194] env[62109]: ERROR nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. 
[ 708.989194] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.989194] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 708.989194] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 708.989194] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.989194] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.989194] env[62109]: ERROR nova.compute.manager raise self.value [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 708.989194] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 708.989194] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.989194] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 708.989657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.989657] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 708.989657] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. 
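The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which is where the PortBindingFailed for port b9a39990-885d-406a-a2d2-338706184202 is raised. Below is a minimal, self-contained sketch of that check, assuming the usual Neutron convention that a port no mechanism driver could bind comes back with binding:vif_type set to 'binding_failed'; the exception class is stubbed locally (message format copied from the log lines above) rather than imported from nova.exception, so this is an illustration of the logged failure mode, not the verbatim Nova source.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed; message format taken
    from the error records above."""
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumed convention: Neutron marks a port it could not bind with
    # binding:vif_type = 'binding_failed'; Nova converts that into
    # PortBindingFailed, which is the exception propagating through the
    # traceback above and aborting the spawn.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# The port created for instance 26a287d7-4602-4d83-8828-41870a49c343 came
# back unbound, so the check fires:
port = {'id': 'b9a39990-885d-406a-a2d2-338706184202',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port b9a39990-..., please check neutron logs ...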
[ 708.989657] env[62109]: ERROR nova.compute.manager [ 708.989657] env[62109]: Traceback (most recent call last): [ 708.989657] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 708.989657] env[62109]: listener.cb(fileno) [ 708.989657] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 708.989657] env[62109]: result = function(*args, **kwargs) [ 708.989657] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 708.989657] env[62109]: return func(*args, **kwargs) [ 708.989657] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 708.989657] env[62109]: raise e [ 708.989657] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.989657] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 708.989657] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 708.989657] env[62109]: created_port_ids = self._update_ports_for_instance( [ 708.989657] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 708.989657] env[62109]: with excutils.save_and_reraise_exception(): [ 708.989657] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.989657] env[62109]: self.force_reraise() [ 708.989657] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.989657] env[62109]: raise self.value [ 708.989657] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 708.989657] env[62109]: updated_port = self._update_port( [ 708.989657] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.989657] env[62109]: _ensure_no_port_binding_failure(port) [ 708.989657] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.989657] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 708.990565] env[62109]: nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. [ 708.990565] env[62109]: Removing descriptor: 19 [ 709.040293] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 709.067857] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.068171] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.068328] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.068506] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.068648] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.068793] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.069003] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.069158] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 709.069320] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.069475] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.069643] env[62109]: DEBUG nova.virt.hardware [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.070709] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e8a057-7653-4a4c-9834-299964367b68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.078786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4906c8-d0f2-49ab-943d-c7ff62712d33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.093809] env[62109]: ERROR nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. 
[ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] Traceback (most recent call last): [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] yield resources [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.driver.spawn(context, instance, image_meta, [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] vm_ref = self.build_virtual_machine(instance, [ 709.093809] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] for vif in network_info: [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return self._sync_wrapper(fn, *args, **kwargs) [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.wait() [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self[:] = self._gt.wait() [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return self._exit_event.wait() [ 709.094159] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 709.094159] env[62109]: ERROR 
nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] current.throw(*self._exc) [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] result = function(*args, **kwargs) [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return func(*args, **kwargs) [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise e [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] nwinfo = self.network_api.allocate_for_instance( [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] created_port_ids = self._update_ports_for_instance( [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] with excutils.save_and_reraise_exception(): [ 709.094576] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.force_reraise() [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise self.value [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] updated_port = self._update_port( [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] _ensure_no_port_binding_failure(port) [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise exception.PortBindingFailed(port_id=port['id']) [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. [ 709.094947] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] [ 709.094947] env[62109]: INFO nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Terminating instance [ 709.096205] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.113250] env[62109]: DEBUG nova.scheduler.client.report [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 709.117942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg c0c0be85067e4861ad12023be0a8be8a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 709.130539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0c0be85067e4861ad12023be0a8be8a [ 709.313034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.419188] env[62109]: DEBUG nova.network.neutron [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.521697] env[62109]: DEBUG nova.network.neutron [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.522284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] Expecting reply to msg 09eaae5f6674427e82e6c04f876b9919 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 709.530833] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09eaae5f6674427e82e6c04f876b9919 [ 709.621174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.621710] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 709.623931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 4ba17707cc27441ea358fb0f6529be42 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 709.632070] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.634s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.632070] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg e0eabeff63ad4270909a88ffb25055fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 709.665874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ba17707cc27441ea358fb0f6529be42 [ 709.666428] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0eabeff63ad4270909a88ffb25055fb [ 710.025118] env[62109]: DEBUG oslo_concurrency.lockutils [req-21e02f77-9a50-4195-8d64-f3dfbcebc91f req-01852c4d-08b1-4222-b49c-c231c9d9240e service nova] Releasing lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.025626] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 
tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquired lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.025741] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 710.026203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg d3f010b4078d40b7b86fc02c381f16c7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 710.033516] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3f010b4078d40b7b86fc02c381f16c7 [ 710.132397] env[62109]: DEBUG nova.compute.utils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.132397] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 09d42a5e392c4566a62e13929cfad845 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 710.132397] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 710.132397] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.142592] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09d42a5e392c4566a62e13929cfad845 [ 710.204163] env[62109]: DEBUG nova.policy [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e2a4ff94deb42b4a0dbb88b48c1960f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '801256c0ba804c308bafbf041533b989', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 710.507601] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Successfully created port: 89293cc2-155b-47ed-b5ba-f18c8b302e91 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.553158] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.595212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501f7615-cae1-4eaa-b6b7-4ea462a68b8e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.605413] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4950f266-bcf8-4590-b543-4e35a35c14e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.650798] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.653125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 41d7970cffd5405ea8c2690bcb7a3a5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 710.656558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80da3216-1ccc-4eaa-a84f-cd63e4933af4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.664453] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056e7950-943e-41bf-ace2-d14d80676866 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.678585] env[62109]: DEBUG nova.compute.provider_tree [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.679254] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 7dd171dcf37549839feee62d7a1f441a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 710.691143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd171dcf37549839feee62d7a1f441a [ 710.702184] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.702659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg fbe492befc5c4a9ab56303370067f415 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 710.712602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbe492befc5c4a9ab56303370067f415 [ 710.735375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41d7970cffd5405ea8c2690bcb7a3a5f [ 711.057060] env[62109]: DEBUG nova.compute.manager [req-4bd13648-5017-4cd6-89f4-58f9b576f53c req-85881f33-3397-4540-949d-9631ceebdd0a service nova] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Received event network-vif-deleted-b9a39990-885d-406a-a2d2-338706184202 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 711.188762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 1b9e3622d4714deeaeaba4e700491d82 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.196052] env[62109]: DEBUG nova.scheduler.client.report [None 
req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 711.196052] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 4652d52e2be442d0b6ce281174ea3ab0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.204556] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Releasing lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.205108] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 711.205453] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 711.205854] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdfe07a6-8adf-4d9f-bc22-1bd308f15f9e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.213163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4652d52e2be442d0b6ce281174ea3ab0 [ 711.217909] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a637fb-0fca-46fd-96a3-7c93cef12abf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.230271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b9e3622d4714deeaeaba4e700491d82 [ 711.239313] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26a287d7-4602-4d83-8828-41870a49c343 could not be found. 
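The "Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e" record above carries the full inventory the resource tracker reports to placement. As a rough illustration using standard placement capacity arithmetic (not code lifted from Nova or Placement), schedulable capacity per resource class is (total - reserved) * allocation_ratio; the sketch below applies that formula to the inventory dict exactly as logged.

# Capacity implied by the inventory logged for provider
# 5d099501-5ecf-4ee9-ac08-22024ac3c80e, using the usual placement formula
# (total - reserved) * allocation_ratio. Illustrative sketch only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 124,   'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g} (max {inv['max_unit']} per allocation)")

# VCPU: capacity=192 (max 16 per allocation)
# MEMORY_MB: capacity=196078 (max 65530 per allocation)
# DISK_GB: capacity=400 (max 124 per allocation)

With the 4.0 allocation ratio on VCPU, the host advertises 192 schedulable vCPUs even though the inventory lists 48 physical ones.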
[ 711.239635] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 711.239939] env[62109]: INFO nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Took 0.03 seconds to destroy the instance on the hypervisor. [ 711.240317] env[62109]: DEBUG oslo.service.loopingcall [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.240633] env[62109]: DEBUG nova.compute.manager [-] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 711.240836] env[62109]: DEBUG nova.network.neutron [-] [instance: 26a287d7-4602-4d83-8828-41870a49c343] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 711.260151] env[62109]: DEBUG nova.network.neutron [-] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.260763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg af6dfa57669e4aceb69ba2fc9fecc764 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.283398] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af6dfa57669e4aceb69ba2fc9fecc764 [ 711.618855] env[62109]: ERROR nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. 
[ 711.618855] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.618855] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.618855] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.618855] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.618855] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.618855] env[62109]: ERROR nova.compute.manager raise self.value [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.618855] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.618855] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.618855] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.619289] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.619289] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.619289] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. 
[ 711.619289] env[62109]: ERROR nova.compute.manager [ 711.619289] env[62109]: Traceback (most recent call last): [ 711.619289] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.619289] env[62109]: listener.cb(fileno) [ 711.619289] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.619289] env[62109]: result = function(*args, **kwargs) [ 711.619289] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.619289] env[62109]: return func(*args, **kwargs) [ 711.619289] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.619289] env[62109]: raise e [ 711.619289] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.619289] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 711.619289] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.619289] env[62109]: created_port_ids = self._update_ports_for_instance( [ 711.619289] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.619289] env[62109]: with excutils.save_and_reraise_exception(): [ 711.619289] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.619289] env[62109]: self.force_reraise() [ 711.619289] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.619289] env[62109]: raise self.value [ 711.619289] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.619289] env[62109]: updated_port = self._update_port( [ 711.619289] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.619289] env[62109]: _ensure_no_port_binding_failure(port) [ 711.619289] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.619289] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.619978] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. [ 711.619978] env[62109]: Removing descriptor: 19 [ 711.695515] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 711.698017] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.073s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.698848] env[62109]: ERROR nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Traceback (most recent call last): [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.driver.spawn(context, instance, image_meta, [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] vm_ref = self.build_virtual_machine(instance, [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.698848] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] for vif in network_info: [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return self._sync_wrapper(fn, *args, **kwargs) [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.wait() [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.699234] 
env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self[:] = self._gt.wait() [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return self._exit_event.wait() [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] current.throw(*self._exc) [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.699234] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] result = function(*args, **kwargs) [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] return func(*args, **kwargs) [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise e [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] nwinfo = self.network_api.allocate_for_instance( [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] created_port_ids = self._update_ports_for_instance( [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] with excutils.save_and_reraise_exception(): [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] self.force_reraise() [ 711.699576] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise self.value [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] updated_port = self._update_port( [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] _ensure_no_port_binding_failure(port) [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] raise exception.PortBindingFailed(port_id=port['id']) [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] nova.exception.PortBindingFailed: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. [ 711.699938] env[62109]: ERROR nova.compute.manager [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] [ 711.699938] env[62109]: DEBUG nova.compute.utils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 711.700949] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.519s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.702824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 2dfac0b97cdf4ad3a472f3a66a6f94b4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.706254] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Build of instance 11a6eaa1-0d35-49cf-9341-b74129cf087b was re-scheduled: Binding failed for port b1bc0a4e-9fde-485b-9637-ec9f81514cd0, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 711.706254] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 711.706254] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquiring lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.706254] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Acquired lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.706441] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.706441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg aca2447f562d4b6e894bcd220d39d35b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.715091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aca2447f562d4b6e894bcd220d39d35b [ 711.730885] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.731127] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.731283] env[62109]: DEBUG 
nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.731463] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.731976] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.731976] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 711.731976] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 711.732142] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.737106] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.737106] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.737106] env[62109]: DEBUG nova.virt.hardware [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.737106] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39cc91d-e075-49d3-b92a-93cc71090dc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.741626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dfac0b97cdf4ad3a472f3a66a6f94b4 [ 711.742919] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b44f0ca4-77e3-42a0-bce9-419e768cc907 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.756832] env[62109]: ERROR nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Traceback (most recent call last): [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] yield resources [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.driver.spawn(context, instance, image_meta, [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] vm_ref = self.build_virtual_machine(instance, [ 711.756832] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] for vif in network_info: [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return self._sync_wrapper(fn, *args, **kwargs) [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.wait() [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self[:] 
= self._gt.wait() [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return self._exit_event.wait() [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 711.757213] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] current.throw(*self._exc) [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] result = function(*args, **kwargs) [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return func(*args, **kwargs) [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise e [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] nwinfo = self.network_api.allocate_for_instance( [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] created_port_ids = self._update_ports_for_instance( [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] with excutils.save_and_reraise_exception(): [ 711.757563] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.force_reraise() [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise self.value [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: 
dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] updated_port = self._update_port( [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] _ensure_no_port_binding_failure(port) [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise exception.PortBindingFailed(port_id=port['id']) [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. [ 711.757900] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] [ 711.757900] env[62109]: INFO nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Terminating instance [ 711.759150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquiring lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.759304] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquired lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.759462] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.759957] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 556d509a1bd5400abc42781d3c761312 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.765608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 556d509a1bd5400abc42781d3c761312 [ 711.773558] env[62109]: DEBUG nova.network.neutron [-] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.773936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8fc604f92da94cab8c9106ecf14588be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 711.782961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fc604f92da94cab8c9106ecf14588be [ 712.234834] env[62109]: DEBUG 
nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.275253] env[62109]: INFO nova.compute.manager [-] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Took 1.03 seconds to deallocate network for instance. [ 712.277503] env[62109]: DEBUG nova.compute.claims [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 712.277680] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.302326] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.407330] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.407966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 8610970203a447bc90a8886f8a3fcad8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 712.415698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8610970203a447bc90a8886f8a3fcad8 [ 712.507873] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.508753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg acada4b9da6c4f7ca84e148bb322aa78 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 712.536448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acada4b9da6c4f7ca84e148bb322aa78 [ 712.639355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24261fb-33f0-46a2-b86e-cb4fe2f7cb29 
{{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.644231] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666a30dc-3a9f-431e-a11a-622a0e46797c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.678192] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28b625a-0f41-43df-a809-4970c166ec6a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.685941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad70b75-c899-4e24-a8ca-a40a1afd0f83 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.699574] env[62109]: DEBUG nova.compute.provider_tree [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.700997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 1c1e7beded7a4ed08f476aa9636b79b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 712.708846] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c1e7beded7a4ed08f476aa9636b79b6 [ 712.913278] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Releasing lock "refresh_cache-11a6eaa1-0d35-49cf-9341-b74129cf087b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.915341] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 712.915341] env[62109]: DEBUG nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.915341] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 712.942467] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.943062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 72571fb4a4ad4496a799a0d323d6d871 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 712.950911] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72571fb4a4ad4496a799a0d323d6d871 [ 713.010762] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Releasing lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.011261] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 713.011439] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 713.011737] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f992cffa-1f50-43fe-9912-e7b8247d760f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.023366] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4c4651-4a95-4939-9477-b712e9bcaaa4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.045111] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09 could not be found. [ 713.045416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 713.045508] env[62109]: INFO nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Took 0.03 seconds to destroy the instance on the hypervisor. [ 713.045744] env[62109]: DEBUG oslo.service.loopingcall [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.045999] env[62109]: DEBUG nova.compute.manager [-] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.046143] env[62109]: DEBUG nova.network.neutron [-] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.067621] env[62109]: DEBUG nova.network.neutron [-] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.068190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 700e5b678a314c5fa800cb358e2bde38 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.074312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 700e5b678a314c5fa800cb358e2bde38 [ 713.085290] env[62109]: DEBUG nova.compute.manager [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Received event network-changed-89293cc2-155b-47ed-b5ba-f18c8b302e91 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 713.085733] env[62109]: DEBUG nova.compute.manager [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Refreshing instance network info cache due to event network-changed-89293cc2-155b-47ed-b5ba-f18c8b302e91. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 713.085733] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] Acquiring lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.085932] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] Acquired lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.086016] env[62109]: DEBUG nova.network.neutron [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Refreshing network info cache for port 89293cc2-155b-47ed-b5ba-f18c8b302e91 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 713.086539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] Expecting reply to msg b5423556013f42d1b35633e8d6fadd6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.093648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5423556013f42d1b35633e8d6fadd6d [ 713.125231] env[62109]: DEBUG nova.network.neutron [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.204149] env[62109]: DEBUG nova.scheduler.client.report [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.207568] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 49040b01663b4671ad8a5025f66581e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.218323] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49040b01663b4671ad8a5025f66581e8 [ 713.293532] env[62109]: DEBUG nova.network.neutron [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.293829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] Expecting reply to msg f398b2fbb33a40c6a2d73cc998368416 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.302749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f398b2fbb33a40c6a2d73cc998368416 [ 713.451725] env[62109]: DEBUG nova.network.neutron [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.452236] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 651ce8b6810c4a9d9381c2b4a45e7031 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.467848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 651ce8b6810c4a9d9381c2b4a45e7031 [ 713.570036] env[62109]: DEBUG nova.network.neutron [-] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.570522] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c49f62af45334b94a4df7ccf8c03e6ea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.580203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c49f62af45334b94a4df7ccf8c03e6ea [ 713.713517] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.012s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.714172] env[62109]: ERROR nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Traceback (most recent call last): [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.driver.spawn(context, instance, image_meta, [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] vm_ref = self.build_virtual_machine(instance, [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.714172] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] for vif in network_info: [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self._sync_wrapper(fn, *args, **kwargs) [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.wait() [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self[:] = self._gt.wait() [ 713.714555] env[62109]: ERROR nova.compute.manager 
[instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self._exit_event.wait() [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] result = hub.switch() [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 713.714555] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return self.greenlet.switch() [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] result = function(*args, **kwargs) [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] return func(*args, **kwargs) [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise e [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] nwinfo = self.network_api.allocate_for_instance( [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] created_port_ids = self._update_ports_for_instance( [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] with excutils.save_and_reraise_exception(): [ 713.714941] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] self.force_reraise() [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise self.value [ 
713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] updated_port = self._update_port( [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] _ensure_no_port_binding_failure(port) [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] raise exception.PortBindingFailed(port_id=port['id']) [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] nova.exception.PortBindingFailed: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. [ 713.715300] env[62109]: ERROR nova.compute.manager [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] [ 713.715597] env[62109]: DEBUG nova.compute.utils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.716134] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.686s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.719050] env[62109]: INFO nova.compute.claims [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.719722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 87c4a87ceee8441f80dc4e1e6d68344c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.724028] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Build of instance 3a4e1dcc-610f-4037-94e9-c9815c12ed1d was re-scheduled: Binding failed for port 2ee61c01-65fa-4dc1-b14e-99dd8e88ccfe, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.724028] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.724028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.724028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.724246] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.724246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 238243ab911448e9b7381f3867ab451f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.732912] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 238243ab911448e9b7381f3867ab451f [ 713.773437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87c4a87ceee8441f80dc4e1e6d68344c [ 713.804123] env[62109]: DEBUG oslo_concurrency.lockutils [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] Releasing lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.804123] env[62109]: DEBUG nova.compute.manager [req-0dd4ca31-2fa2-478d-8312-759c567105c3 req-2346c01c-cc2a-430f-ba57-658e7e91b714 service nova] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Received event network-vif-deleted-89293cc2-155b-47ed-b5ba-f18c8b302e91 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 713.954448] env[62109]: INFO nova.compute.manager [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] [instance: 11a6eaa1-0d35-49cf-9341-b74129cf087b] Took 1.04 seconds to deallocate network for instance. 
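[editor's note] Every failed build in this section bottoms out in the same two frames: nova/network/neutron.py `_update_port()` calls `_ensure_no_port_binding_failure(port)`, which raises `nova.exception.PortBindingFailed`, after which the compute manager aborts the resource claim, deallocates the network, and re-schedules the instance, as the surrounding records show. The sketch below is a minimal, self-contained illustration of that check in isolation; it is not Nova's implementation. The `binding:vif_type == 'binding_failed'` test and all class/function names here are assumptions made only so the example runs on its own.

```python
# Minimal sketch (not Nova's actual code) of the check the tracebacks above
# point at: after asking Neutron to bind a port, the returned port dict is
# inspected and a PortBindingFailed-style error is raised when the binding
# did not succeed. Treat 'binding:vif_type' == 'binding_failed' as an
# assumption about the Neutron port payload; names are illustrative.

class PortBindingFailed(Exception):
    def __init__(self, port_id: str) -> None:
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information."
        )


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reports that binding this port failed."""
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


if __name__ == "__main__":
    # Example payload shaped like a port whose bind failed, using the port id
    # from the first traceback in this section.
    port = {
        "id": "b1bc0a4e-9fde-485b-9637-ec9f81514cd0",
        "binding:vif_type": "binding_failed",
    }
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        # In the log above, nova-compute reacts to this exception by aborting
        # the instance claim, deallocating the network, and re-scheduling.
        print(exc)
```

Per the records that follow, the practical effect of this exception is that the instance never reaches the hypervisor: the claim is aborted, `deallocate_for_instance()` runs against an empty network info cache, and the scheduler deletes the instance's allocations before the build is retried.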
[ 713.956170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg ad02f06c10c443b78b5d235d46b59527 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 713.997046] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad02f06c10c443b78b5d235d46b59527 [ 714.077032] env[62109]: INFO nova.compute.manager [-] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Took 1.03 seconds to deallocate network for instance. [ 714.084975] env[62109]: DEBUG nova.compute.claims [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 714.085044] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.225875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg b06828ce81f94767918046fdf13f01b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 714.235264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b06828ce81f94767918046fdf13f01b6 [ 714.243789] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.348751] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.349304] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 7eb2af8a5ebe480f8d248e44d5cc1c11 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 714.357718] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eb2af8a5ebe480f8d248e44d5cc1c11 [ 714.460432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg 68bae0469961417f98d3379c60e8c761 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 714.515556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68bae0469961417f98d3379c60e8c761 [ 714.611174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquiring lock "436788b9-92bb-4088-9c24-c2e9a073c09d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.611405] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "436788b9-92bb-4088-9c24-c2e9a073c09d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.852225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-3a4e1dcc-610f-4037-94e9-c9815c12ed1d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.852225] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.852225] env[62109]: DEBUG nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.852349] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.877650] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.878401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 286e4b32baf84e6c917a13875c3f8c1c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 714.887306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 286e4b32baf84e6c917a13875c3f8c1c [ 715.000297] env[62109]: INFO nova.scheduler.client.report [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Deleted allocations for instance 11a6eaa1-0d35-49cf-9341-b74129cf087b [ 715.007125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Expecting reply to msg e12395a26e95446d98bb67afa179fa13 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.024518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e12395a26e95446d98bb67afa179fa13 [ 715.214242] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed9af4e-d6cb-4f3d-b04d-e7b4f7682af2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.222203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e645bc7-baf7-40ef-aa80-080255ca65ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.256179] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e849673-6d13-48a2-af8d-66d87b487886 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.268491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3e75e1-34db-4b59-90bf-b60ea7d544fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.279921] env[62109]: DEBUG 
nova.compute.provider_tree [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.280874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg ea0a42da60fb4715806d22229ef5c861 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.288713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea0a42da60fb4715806d22229ef5c861 [ 715.380122] env[62109]: DEBUG nova.network.neutron [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.380691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 925f22301aa44e53b2c0904fb691ea9a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.416871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 925f22301aa44e53b2c0904fb691ea9a [ 715.512904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-53233ff4-749b-4f63-8c48-18354418014f tempest-InstanceActionsNegativeTestJSON-308951544 tempest-InstanceActionsNegativeTestJSON-308951544-project-member] Lock "11a6eaa1-0d35-49cf-9341-b74129cf087b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.102s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.513274] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 0f4545ba3b494246a3293db3fde9ebda in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.528532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f4545ba3b494246a3293db3fde9ebda [ 715.783164] env[62109]: DEBUG nova.scheduler.client.report [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.785726] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 
tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg be3d74bd8ea14f28bb60d70accfde667 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.807579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be3d74bd8ea14f28bb60d70accfde667 [ 715.882941] env[62109]: INFO nova.compute.manager [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3a4e1dcc-610f-4037-94e9-c9815c12ed1d] Took 1.03 seconds to deallocate network for instance. [ 715.884804] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg c3726852a6ad49ec8d1132b943d9d57c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 715.925467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3726852a6ad49ec8d1132b943d9d57c [ 716.015256] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 716.017081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 26981dc756dc46779a471260c1113f94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.053091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26981dc756dc46779a471260c1113f94 [ 716.288785] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.289356] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 716.291111] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg def0af04ddae482dbe88be0a87327eba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.292553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.462s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.293788] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 699215ee6de64514a7b2daa3ca177488 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.331409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 699215ee6de64514a7b2daa3ca177488 [ 716.338139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg def0af04ddae482dbe88be0a87327eba [ 716.390149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg b9306ed41b5549e59f740bcc3a815847 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.420120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9306ed41b5549e59f740bcc3a815847 [ 716.538396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.796918] env[62109]: DEBUG nova.compute.utils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.797591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 0db79f1fb1db4f26a792d3794283adf1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.805403] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 716.805403] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 716.808074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0db79f1fb1db4f26a792d3794283adf1 [ 716.914214] env[62109]: INFO nova.scheduler.client.report [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Deleted allocations for instance 3a4e1dcc-610f-4037-94e9-c9815c12ed1d [ 716.925370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg c893b91fffe34c7fa1b6af53e3941e23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 716.940462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c893b91fffe34c7fa1b6af53e3941e23 [ 717.063018] env[62109]: DEBUG nova.policy [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fa210afa0ba4ce8a50b24f78a4c69f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '214dea28f048427e8a9eb82bcc2cb063', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 717.302153] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 717.304041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 04bfd682a8e849b5b5381542fcb73d5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.309383] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquiring lock "53d6d89d-04bb-421d-994c-014830491dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.309655] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "53d6d89d-04bb-421d-994c-014830491dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.355058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e506c64-5a2f-4f3b-9675-37063d0acd4c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.363448] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247d7f54-7278-418f-b9d5-5181208cae24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.394447] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04bfd682a8e849b5b5381542fcb73d5f [ 717.395370] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b617a064-ecca-4a20-8b5b-1e173fec43a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.403362] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862d2a66-a607-47a8-b30e-f76797414e27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.417243] env[62109]: DEBUG nova.compute.provider_tree [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.417804] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg aad85c7fdf234db49d679b692cd09de5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.425329] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aad85c7fdf234db49d679b692cd09de5 [ 717.428181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cb601abf-819c-4685-a22f-8b0340cfd065 tempest-AttachInterfacesTestJSON-1305547194 
tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "3a4e1dcc-610f-4037-94e9-c9815c12ed1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.861s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.428702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 45a35ae1062f4f2eb9d30d51bf5e37e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.442379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45a35ae1062f4f2eb9d30d51bf5e37e5 [ 717.548103] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Successfully created port: a59f3c1f-3c1e-42a5-9305-85d22fee72e2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.812805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 96d6c4d1f83146f0b69b6e3bf8d34149 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.863104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d6c4d1f83146f0b69b6e3bf8d34149 [ 717.920120] env[62109]: DEBUG nova.scheduler.client.report [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.922555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 98fc8c2f0ecd4cb2a2219e509da30681 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.931517] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.933271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 3820a52f10e5466d96bcfd9897100787 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 717.935078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98fc8c2f0ecd4cb2a2219e509da30681 [ 717.980631] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3820a52f10e5466d96bcfd9897100787 [ 718.316625] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 718.347380] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 718.347632] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 718.347786] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.347995] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 718.348254] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.348349] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 
tempest-ServerActionsTestOtherB-638853083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 718.348588] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 718.348704] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 718.348865] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 718.349036] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 718.349220] env[62109]: DEBUG nova.virt.hardware [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.350061] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455d5547-7efa-4378-ae1d-71ae6c402905 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.358520] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81c2f1e-adf5-4412-907a-63d733d8d8f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.393384] env[62109]: DEBUG nova.compute.manager [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Received event network-changed-a59f3c1f-3c1e-42a5-9305-85d22fee72e2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 718.393560] env[62109]: DEBUG nova.compute.manager [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Refreshing instance network info cache due to event network-changed-a59f3c1f-3c1e-42a5-9305-85d22fee72e2. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 718.393765] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] Acquiring lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.393904] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] Acquired lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.394056] env[62109]: DEBUG nova.network.neutron [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Refreshing network info cache for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 718.394502] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] Expecting reply to msg 7ee63452084348e1bb102aed1c92a449 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 718.400556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ee63452084348e1bb102aed1c92a449 [ 718.425266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.133s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.425981] env[62109]: ERROR nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. 
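Annotation: the PortBindingFailed tracebacks recorded below all terminate in the _ensure_no_port_binding_failure helper in nova/network/neutron.py, which raises exception.PortBindingFailed(port_id=port['id']) as shown in the frames. The following is a minimal, hedged sketch of that check, written to be self-contained; the 'binding:vif_type' attribute and the 'binding_failed' sentinel come from general Nova/Neutron knowledge and are assumptions, not something this log states.

    # Hedged sketch of the check implied by the tracebacks below; not a verbatim
    # copy of nova/network/neutron.py. The 'binding:vif_type' key and the
    # 'binding_failed' value are assumed from general Neutron behaviour.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron records the binding outcome on the port; a vif_type of
        # 'binding_failed' means no host could bind the port, so Nova aborts
        # the build with the error text seen in this log.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Illustrative only: a port whose binding failed would raise the same
    # message that appears in the ERROR records below.
    # _ensure_no_port_binding_failure({'id': 'example-port-id',
    #                                  'binding:vif_type': 'binding_failed'})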
[ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Traceback (most recent call last): [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.driver.spawn(context, instance, image_meta, [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self._vmops.spawn(context, instance, image_meta, injected_files, [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] vm_ref = self.build_virtual_machine(instance, [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] vif_infos = vmwarevif.get_vif_info(self._session, [ 718.425981] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] for vif in network_info: [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self._sync_wrapper(fn, *args, **kwargs) [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.wait() [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self[:] = self._gt.wait() [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self._exit_event.wait() [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] result = hub.switch() [ 718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
718.426328] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return self.greenlet.switch() [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] result = function(*args, **kwargs) [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] return func(*args, **kwargs) [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise e [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] nwinfo = self.network_api.allocate_for_instance( [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] created_port_ids = self._update_ports_for_instance( [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] with excutils.save_and_reraise_exception(): [ 718.426702] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] self.force_reraise() [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise self.value [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] updated_port = self._update_port( [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] _ensure_no_port_binding_failure(port) [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] raise exception.PortBindingFailed(port_id=port['id']) [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] nova.exception.PortBindingFailed: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. [ 718.427072] env[62109]: ERROR nova.compute.manager [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] [ 718.427403] env[62109]: DEBUG nova.compute.utils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 718.427800] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.715s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.429698] env[62109]: INFO nova.compute.claims [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.431272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 7d5119f2f93e42ae8d11a82276e31942 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 718.443269] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Build of instance 0e018d70-d6dd-4f79-bb03-14b815645562 was re-scheduled: Binding failed for port 0dbb80c9-2197-47a4-942f-1135d1c701d1, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 718.443269] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 718.443269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.443269] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.443269] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 718.443753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4a41f972fcce42b9a4eff22a145ea874 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 718.443753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a41f972fcce42b9a4eff22a145ea874 [ 718.454686] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.484730] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d5119f2f93e42ae8d11a82276e31942 [ 718.593944] env[62109]: ERROR nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. 
[ 718.593944] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.593944] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.593944] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.593944] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.593944] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.593944] env[62109]: ERROR nova.compute.manager raise self.value [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.593944] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 718.593944] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.593944] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 718.594461] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.594461] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 718.594461] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. 
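Annotation: the __exit__ / force_reraise() / "raise self.value" frames in the traceback above come from oslo.utils' save_and_reraise_exception() context manager (oslo_utils/excutils.py), which Nova's _update_ports_for_instance uses to run cleanup and then re-raise the original exception. A minimal usage sketch under those assumptions follows; update_ports, update_one, and log are illustrative names, not Nova code.

    # Minimal sketch of the oslo_utils.excutils pattern visible in the frames above.
    from oslo_utils import excutils

    def update_ports(ports, update_one, log):
        for port in ports:
            try:
                update_one(port)
            except Exception:
                # Any cleanup/logging runs inside the context manager; on exit it
                # re-raises the saved exception (the 'raise self.value' frame above),
                # so the original PortBindingFailed propagates unchanged.
                with excutils.save_and_reraise_exception():
                    log.error("Failed to update port %s", port.get('id'))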
[ 718.594461] env[62109]: ERROR nova.compute.manager [ 718.594461] env[62109]: Traceback (most recent call last): [ 718.594461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 718.594461] env[62109]: listener.cb(fileno) [ 718.594461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.594461] env[62109]: result = function(*args, **kwargs) [ 718.594461] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 718.594461] env[62109]: return func(*args, **kwargs) [ 718.594461] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.594461] env[62109]: raise e [ 718.594461] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.594461] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 718.594461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.594461] env[62109]: created_port_ids = self._update_ports_for_instance( [ 718.594461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.594461] env[62109]: with excutils.save_and_reraise_exception(): [ 718.594461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.594461] env[62109]: self.force_reraise() [ 718.594461] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.594461] env[62109]: raise self.value [ 718.594461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.594461] env[62109]: updated_port = self._update_port( [ 718.594461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.594461] env[62109]: _ensure_no_port_binding_failure(port) [ 718.594461] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.594461] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 718.595315] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. [ 718.595315] env[62109]: Removing descriptor: 16 [ 718.595315] env[62109]: ERROR nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. 
[ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Traceback (most recent call last): [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] yield resources [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.driver.spawn(context, instance, image_meta, [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 718.595315] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] vm_ref = self.build_virtual_machine(instance, [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] for vif in network_info: [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self._sync_wrapper(fn, *args, **kwargs) [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.wait() [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self[:] = self._gt.wait() [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self._exit_event.wait() [ 718.595855] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 718.596373] env[62109]: ERROR 
nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] result = hub.switch() [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self.greenlet.switch() [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] result = function(*args, **kwargs) [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return func(*args, **kwargs) [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise e [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] nwinfo = self.network_api.allocate_for_instance( [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 718.596373] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] created_port_ids = self._update_ports_for_instance( [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] with excutils.save_and_reraise_exception(): [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.force_reraise() [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise self.value [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] updated_port = self._update_port( [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.596710] 
env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] _ensure_no_port_binding_failure(port) [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.596710] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise exception.PortBindingFailed(port_id=port['id']) [ 718.597053] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. [ 718.597053] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] [ 718.597053] env[62109]: INFO nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Terminating instance [ 718.597175] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquiring lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.910363] env[62109]: DEBUG nova.network.neutron [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.939595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg a1cd186dcdb7443e9868b2e59296a74b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 718.947470] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1cd186dcdb7443e9868b2e59296a74b [ 718.959951] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.991017] env[62109]: DEBUG nova.network.neutron [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.991552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] Expecting reply to msg 7f1a563c326e4492b2a8c195720cc230 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 718.999349] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f1a563c326e4492b2a8c195720cc230 [ 719.111733] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.112272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b6debf296587415d9f1df39fa998e56d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 719.121436] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6debf296587415d9f1df39fa998e56d [ 719.495598] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bc0632a-4ab8-4c3d-87df-8d8a630a963b req-731c159f-2e95-4a63-923e-9947da11fa32 service nova] Releasing lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.495598] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquired lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.495598] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.495598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 30f260809db74c66b8655c5e15c3e8f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 719.508844] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30f260809db74c66b8655c5e15c3e8f0 [ 719.614790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-0e018d70-d6dd-4f79-bb03-14b815645562" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.615206] 
env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 719.615565] env[62109]: DEBUG nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.615864] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 719.637970] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 719.638643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg fadccb208e88466a904516c41e9f0d5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 719.646426] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fadccb208e88466a904516c41e9f0d5f [ 719.882064] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd814e0-86a5-4498-a307-2e56c484ba44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.889683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427f21fe-2fd2-447c-b344-1a6b2f21cabe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.927023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b7e3dd-cd7b-48ef-9e5d-8da3a709a9fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.935823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2234415a-516e-494e-8d96-6ec9abb73a26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.950822] env[62109]: DEBUG nova.compute.provider_tree [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.951299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 
d0fd92b8e55b4cbaaf716858b4a27da5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 719.958187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0fd92b8e55b4cbaaf716858b4a27da5 [ 720.017533] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.113126] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.113959] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 3112d0661bdb45ab87a755e6a97c8265 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.129305] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3112d0661bdb45ab87a755e6a97c8265 [ 720.143128] env[62109]: DEBUG nova.network.neutron [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.143963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f48789f4afbf42bfa3b7f152b671cb43 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.157827] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f48789f4afbf42bfa3b7f152b671cb43 [ 720.422108] env[62109]: DEBUG nova.compute.manager [req-1037af62-8d6f-4173-8ace-ecd97fdbf2ee req-b4c5a3ab-8cec-4e1d-a750-e9d23ba83aa5 service nova] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Received event network-vif-deleted-a59f3c1f-3c1e-42a5-9305-85d22fee72e2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 720.453442] env[62109]: DEBUG nova.scheduler.client.report [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.455904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply 
to msg 8e10af3933b14598b1d1a5702c0f19d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.474924] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e10af3933b14598b1d1a5702c0f19d6 [ 720.532027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "3ada5090-7219-4835-b508-2188501ae5e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.532027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "3ada5090-7219-4835-b508-2188501ae5e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.623203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Releasing lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.623656] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 720.623853] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 720.624181] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26bd4c23-dfaf-45aa-9790-5d18d7863175 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.632654] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e4899d-f42e-4e2c-9ebd-9fe9673fce71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.651180] env[62109]: INFO nova.compute.manager [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 0e018d70-d6dd-4f79-bb03-14b815645562] Took 1.04 seconds to deallocate network for instance. 
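The repeated Acquiring lock / Lock ... acquired ... waited 0.000s / Lock ... "released" ... held Ns lines in this section come from oslo.concurrency's lockutils, which Nova uses to serialize work such as ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance per instance UUID and the ResourceTracker claims on the shared "compute_resources" lock. A minimal sketch of that usage pattern, assuming the documented lockutils.synchronized / lockutils.lock API (the function and variable names below are illustrative, not Nova's):

    from oslo_concurrency import lockutils

    # Decorator form: its wrapper emits the "Acquiring lock ... / Lock ...
    # acquired ... waited Ns / ... 'released' ... held Ns" DEBUG lines whose
    # source is reported as inner .../lockutils.py in this log.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        return {'instance': instance_uuid}

    # Equivalent inline form using the lock() context manager, e.g. to
    # serialize all work on a single instance UUID.
    def locked_build(instance_uuid):
        with lockutils.lock(instance_uuid):
            claim_resources(instance_uuid)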
[ 720.652832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 7bd2a0f6d052462caa94d2e65b434999 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.664473] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29715a53-7a71-4708-b522-e678fe5bd6a9 could not be found. [ 720.664680] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 720.664851] env[62109]: INFO nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 720.665083] env[62109]: DEBUG oslo.service.loopingcall [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.665292] env[62109]: DEBUG nova.compute.manager [-] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.665383] env[62109]: DEBUG nova.network.neutron [-] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.684031] env[62109]: DEBUG nova.network.neutron [-] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.684031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5decb58b59094b759dc8596f0ac9d458 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.684636] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bd2a0f6d052462caa94d2e65b434999 [ 720.689966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5decb58b59094b759dc8596f0ac9d458 [ 720.958466] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.959119] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.960833] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 7bd31d5c521345deaeaf81bcc2e2da9a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 720.961850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.547s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.963577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 0fa46cad8535444e99886dd87c28c85c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.007329] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fa46cad8535444e99886dd87c28c85c [ 721.007987] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bd31d5c521345deaeaf81bcc2e2da9a [ 721.157832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg c132dc0b59b04f64bfdceaf77a895672 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.184576] env[62109]: DEBUG nova.network.neutron [-] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.184576] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6cb8a7de629d4a0595d0abecf8779acc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.191480] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cb8a7de629d4a0595d0abecf8779acc [ 721.193715] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c132dc0b59b04f64bfdceaf77a895672 [ 721.466905] env[62109]: DEBUG nova.compute.utils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.467557] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 22f68fad10654f0295d96aff2c80ea20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.471484] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 721.471586] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 721.477961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22f68fad10654f0295d96aff2c80ea20 [ 721.512412] env[62109]: DEBUG nova.policy [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4ea22bb21004f69a2b27d306493db45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '423f777bec3c474a91970fce3e308097', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 721.682584] env[62109]: INFO nova.scheduler.client.report [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance 0e018d70-d6dd-4f79-bb03-14b815645562 [ 721.688855] env[62109]: INFO nova.compute.manager [-] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Took 1.02 seconds to deallocate network for instance. 
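The PortBindingFailed traceback above for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2 ends in nova.network.neutron._ensure_no_port_binding_failure (neutron.py line 294) raising exception.PortBindingFailed(port_id=port['id']). Based on that raise, the check amounts to inspecting the port dict Neutron returned and failing fast when the binding came back in the failed state; a minimal, self-contained sketch (the 'binding:vif_type' == 'binding_failed' comparison and the local exception class are assumptions for illustration; the real exception lives in nova.exception):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    # Assumed sentinel: Neutron reports a failed binding via binding:vif_type.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by Neutron's port create/update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port whose binding failed on the Neutron side.
    # _ensure_no_port_binding_failure(
    #     {'id': 'a59f3c1f-3c1e-42a5-9305-85d22fee72e2',
    #      'binding:vif_type': 'binding_failed'})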
[ 721.692053] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a7bc8a6dee274a67b0c2a2c5943ea3eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.697874] env[62109]: DEBUG nova.compute.claims [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 721.698126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.705115] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7bc8a6dee274a67b0c2a2c5943ea3eb [ 721.814544] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Successfully created port: 7dc696ee-8e41-4b30-9400-d4ba7c26b170 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.900272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdcc7c5-7e50-4d14-89cf-83a0d27af545 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.907780] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aab340-b51c-404c-acf2-130825030ac5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.938960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0144f464-7c74-420f-9cee-749ae3614548 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.946337] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59f9a20-0b1e-47ab-b12f-e953a78143d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.959173] env[62109]: DEBUG nova.compute.provider_tree [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.959651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 4ee51723d19240e1a3732ecbedb23e3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 721.966459] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ee51723d19240e1a3732ecbedb23e3f [ 721.971643] env[62109]: DEBUG nova.compute.manager 
[None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.973485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 764d8d8f39d2471cbfa0f52bd9315c6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.004955] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 764d8d8f39d2471cbfa0f52bd9315c6e [ 722.198506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f65890f8-d697-442e-af8a-c53a9fdd2611 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "0e018d70-d6dd-4f79-bb03-14b815645562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.983s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.199246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ac8a4aeebfae487e94ca965bcd879bd0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.209292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac8a4aeebfae487e94ca965bcd879bd0 [ 722.462812] env[62109]: DEBUG nova.scheduler.client.report [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.465388] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg cd1caed80dc94493be92dbcb8a36b24b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.477569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 01cea3faf82d491ab6031c3fd7af5c67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.478733] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd1caed80dc94493be92dbcb8a36b24b [ 722.517483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01cea3faf82d491ab6031c3fd7af5c67 [ 722.609770] env[62109]: DEBUG nova.compute.manager [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Received event 
network-changed-7dc696ee-8e41-4b30-9400-d4ba7c26b170 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 722.610190] env[62109]: DEBUG nova.compute.manager [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Refreshing instance network info cache due to event network-changed-7dc696ee-8e41-4b30-9400-d4ba7c26b170. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 722.610190] env[62109]: DEBUG oslo_concurrency.lockutils [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] Acquiring lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.610320] env[62109]: DEBUG oslo_concurrency.lockutils [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] Acquired lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.610577] env[62109]: DEBUG nova.network.neutron [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Refreshing network info cache for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 722.610885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] Expecting reply to msg e93cb2d8bda84a2e83422fd599020fd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.617255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e93cb2d8bda84a2e83422fd599020fd3 [ 722.701635] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 722.703896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a24619a26f024b04af79d3627badba2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.737890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a24619a26f024b04af79d3627badba2d [ 722.764946] env[62109]: ERROR nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. 
[ 722.764946] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.764946] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.764946] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.764946] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.764946] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.764946] env[62109]: ERROR nova.compute.manager raise self.value [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.764946] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 722.764946] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.764946] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 722.765475] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.765475] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 722.765475] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. 
[ 722.765475] env[62109]: ERROR nova.compute.manager [ 722.765475] env[62109]: Traceback (most recent call last): [ 722.765475] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 722.765475] env[62109]: listener.cb(fileno) [ 722.765475] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.765475] env[62109]: result = function(*args, **kwargs) [ 722.765475] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.765475] env[62109]: return func(*args, **kwargs) [ 722.765475] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.765475] env[62109]: raise e [ 722.765475] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.765475] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 722.765475] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.765475] env[62109]: created_port_ids = self._update_ports_for_instance( [ 722.765475] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.765475] env[62109]: with excutils.save_and_reraise_exception(): [ 722.765475] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.765475] env[62109]: self.force_reraise() [ 722.765475] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.765475] env[62109]: raise self.value [ 722.765475] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.765475] env[62109]: updated_port = self._update_port( [ 722.765475] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.765475] env[62109]: _ensure_no_port_binding_failure(port) [ 722.765475] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.765475] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 722.766379] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. [ 722.766379] env[62109]: Removing descriptor: 19 [ 722.968127] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.969079] env[62109]: ERROR nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. 
[ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Traceback (most recent call last): [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.driver.spawn(context, instance, image_meta, [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] vm_ref = self.build_virtual_machine(instance, [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.969079] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] for vif in network_info: [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return self._sync_wrapper(fn, *args, **kwargs) [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.wait() [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self[:] = self._gt.wait() [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return self._exit_event.wait() [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] current.throw(*self._exc) [ 722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
722.969455] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] result = function(*args, **kwargs) [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] return func(*args, **kwargs) [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise e [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] nwinfo = self.network_api.allocate_for_instance( [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] created_port_ids = self._update_ports_for_instance( [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] with excutils.save_and_reraise_exception(): [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] self.force_reraise() [ 722.969802] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise self.value [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] updated_port = self._update_port( [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] _ensure_no_port_binding_failure(port) [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] raise exception.PortBindingFailed(port_id=port['id']) [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] nova.exception.PortBindingFailed: Binding failed for 
port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. [ 722.970218] env[62109]: ERROR nova.compute.manager [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] [ 722.970218] env[62109]: DEBUG nova.compute.utils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 722.970925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.722s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.972978] env[62109]: INFO nova.compute.claims [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.974499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 453491544b41464594715146755530dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.975683] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Build of instance c9b2ced5-a77c-4bff-b115-ce5c523be630 was re-scheduled: Binding failed for port b2097d88-fbe6-444e-861d-6f21a30fe2ba, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 722.976116] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 722.976335] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.976481] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquired lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.976667] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.977052] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 556d848587d24ab1b792c308c36ccf36 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 722.981074] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 722.983396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 556d848587d24ab1b792c308c36ccf36 [ 723.001390] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 723.001621] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 723.001823] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.002037] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 723.002188] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.002328] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 723.002529] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 723.002689] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 723.002853] env[62109]: DEBUG nova.virt.hardware 
[None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 723.003014] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 723.003182] env[62109]: DEBUG nova.virt.hardware [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.004347] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63bcb74-965c-4c2c-84d7-f6cca79e1b0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.009489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 453491544b41464594715146755530dc [ 723.013129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f29c57b-daca-471c-9f57-8e2fe146ae3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.026804] env[62109]: ERROR nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. 
[ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Traceback (most recent call last): [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] yield resources [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.driver.spawn(context, instance, image_meta, [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] vm_ref = self.build_virtual_machine(instance, [ 723.026804] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] for vif in network_info: [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return self._sync_wrapper(fn, *args, **kwargs) [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.wait() [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self[:] = self._gt.wait() [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return self._exit_event.wait() [ 723.027201] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 723.027201] env[62109]: ERROR 
nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] current.throw(*self._exc) [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] result = function(*args, **kwargs) [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return func(*args, **kwargs) [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise e [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] nwinfo = self.network_api.allocate_for_instance( [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] created_port_ids = self._update_ports_for_instance( [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] with excutils.save_and_reraise_exception(): [ 723.027546] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.force_reraise() [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise self.value [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] updated_port = self._update_port( [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] _ensure_no_port_binding_failure(port) [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise exception.PortBindingFailed(port_id=port['id']) [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. [ 723.027905] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] [ 723.027905] env[62109]: INFO nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Terminating instance [ 723.029867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.127316] env[62109]: DEBUG nova.network.neutron [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.228497] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.240980] env[62109]: DEBUG nova.network.neutron [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.241508] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] Expecting reply to msg d92a1441e34043a5940e125a859bba33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 723.254990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d92a1441e34043a5940e125a859bba33 [ 723.479848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 7460f5b299dd43458ad56acbe1217904 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 723.491773] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7460f5b299dd43458ad56acbe1217904 [ 723.500687] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.662461] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.662987] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg d48f44e5c8f14d668530e2ef38e239f8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 723.672971] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d48f44e5c8f14d668530e2ef38e239f8 [ 723.748648] env[62109]: DEBUG oslo_concurrency.lockutils [req-1ab94175-bcfc-4a1d-9006-f5ec7a06b85c req-325632ed-e8ce-48cc-a515-6890c6430ee5 service nova] Releasing lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.748648] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.748648] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 723.748648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 1e796af4b9084ae2ad6c405605ad9fe4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 723.751594] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e796af4b9084ae2ad6c405605ad9fe4 [ 724.165064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Releasing lock "refresh_cache-c9b2ced5-a77c-4bff-b115-ce5c523be630" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.165301] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 724.165482] env[62109]: DEBUG nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.165647] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.179337] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.179880] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg bdddf6b7c8a1490aa6314a097d01541e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.186452] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdddf6b7c8a1490aa6314a097d01541e [ 724.263059] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.327745] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "6163fcd4-cfe4-4432-ba8d-665319fa11ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.328048] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "6163fcd4-cfe4-4432-ba8d-665319fa11ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.331639] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08ebf74-fa43-4b40-8d27-db5025aa9c35 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.339295] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b053485b-1107-4cf8-be9c-b9d497299ff4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.367924] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.368502] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 114ac19befb44352a7be3775f08b0c1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.369719] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe29ede-bdcb-49bd-851b-b4aa37e09106 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.376802] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be56c85e-240b-419f-ae7a-682820ed42e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.380991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 114ac19befb44352a7be3775f08b0c1b [ 724.392027] env[62109]: DEBUG nova.compute.provider_tree [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.392356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 6090ef1b1b3d423d8cd27942a831aa84 in 
queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.399570] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6090ef1b1b3d423d8cd27942a831aa84 [ 724.631138] env[62109]: DEBUG nova.compute.manager [req-89e3b76b-e1ec-40aa-bab7-94192f8a78da req-1b777a65-74ca-49ee-9397-b1003d0fa48c service nova] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Received event network-vif-deleted-7dc696ee-8e41-4b30-9400-d4ba7c26b170 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 724.683774] env[62109]: DEBUG nova.network.neutron [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.684415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 3caf1795b13d42fa852602d2aebcb138 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.692576] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3caf1795b13d42fa852602d2aebcb138 [ 724.873748] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.874601] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 724.874601] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 724.874813] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51bb95bb-7c85-4a0b-9a4f-9c681a1d705f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.883801] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b6250c-ed91-4264-be9e-cc6dbb544eb0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.895150] env[62109]: DEBUG nova.scheduler.client.report [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.897622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 8dcc4710f8594b9fb25a0ce8cd84f948 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.910385] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 446bd52c-4ffb-4e77-89fb-3e8535ceb4af could not be found. [ 724.910592] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 724.910785] env[62109]: INFO nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Took 0.04 seconds to destroy the instance on the hypervisor. [ 724.911012] env[62109]: DEBUG oslo.service.loopingcall [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.911560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dcc4710f8594b9fb25a0ce8cd84f948 [ 724.911932] env[62109]: DEBUG nova.compute.manager [-] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.912054] env[62109]: DEBUG nova.network.neutron [-] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.926741] env[62109]: DEBUG nova.network.neutron [-] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.927342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1e1f07753e9e4b09a06826189ffb224a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 724.934170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e1f07753e9e4b09a06826189ffb224a [ 725.186592] env[62109]: INFO nova.compute.manager [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: c9b2ced5-a77c-4bff-b115-ce5c523be630] Took 1.02 seconds to deallocate network for instance. [ 725.188429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg de2d8e7a31e4498bb3932f86de62e69c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.221117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de2d8e7a31e4498bb3932f86de62e69c [ 725.400582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.401152] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.402803] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 586590aed56e473f96e5e8c1f78dc601 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.403845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.091s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.405206] env[62109]: INFO nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.406676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 969ff0cbd3ad485ebbc2913d3b721145 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.429264] env[62109]: DEBUG nova.network.neutron [-] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.430172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 585c63061c3a46daa08750750b0005ac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.446938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 969ff0cbd3ad485ebbc2913d3b721145 [ 725.451690] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 586590aed56e473f96e5e8c1f78dc601 [ 725.459142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 585c63061c3a46daa08750750b0005ac [ 725.692723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 4a39d406697546969b3c61707c39ea5a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.726552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a39d406697546969b3c61707c39ea5a [ 725.910023] env[62109]: DEBUG nova.compute.utils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.910638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 54181aaf47934d71b61a09d4aa953b8d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.913024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 
tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg e634afde69d943a59c047986761419a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 725.913957] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 725.914118] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 725.923526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e634afde69d943a59c047986761419a2 [ 725.924126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54181aaf47934d71b61a09d4aa953b8d [ 725.934077] env[62109]: INFO nova.compute.manager [-] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Took 1.02 seconds to deallocate network for instance. [ 725.936938] env[62109]: DEBUG nova.compute.claims [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 725.936938] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.983344] env[62109]: DEBUG nova.policy [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6cf6e0db3914555b8ff203940ee5339', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1d64a1d7b54a819ffa1c913bdcecaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 726.219655] env[62109]: INFO nova.scheduler.client.report [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Deleted allocations for instance c9b2ced5-a77c-4bff-b115-ce5c523be630 [ 726.225984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 0c2ecdd0a38548cebc6e1136d9c2f21b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 726.238001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2ecdd0a38548cebc6e1136d9c2f21b [ 726.385300] env[62109]: 
DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Successfully created port: 2f48948e-41a3-4548-a9be-24230ffff871 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.416549] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.418313] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 69e99b188910468a93ec986fc7452b72 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 726.461682] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69e99b188910468a93ec986fc7452b72 [ 726.738122] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9ce9fdb-776b-482f-b3a3-b283d6fbdac8 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "c9b2ced5-a77c-4bff-b115-ce5c523be630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.763s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.738899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 2ec5c115d6bc4563a91779a2973102e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 726.748142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec5c115d6bc4563a91779a2973102e7 [ 726.845812] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc04bcf8-d68f-4dfd-8f0f-19e71e3d27b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.853361] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65d0d39-7709-4dec-abcd-bf2b8f0e467d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.882381] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53b922a-27da-4211-b8d9-dae03467b818 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.889452] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd10380-0b93-4896-a8e8-df01e00ff2ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.902437] env[62109]: DEBUG nova.compute.provider_tree [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.902976] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c057c2478ce542429a7356e353d671a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 726.911243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c057c2478ce542429a7356e353d671a7 [ 726.927029] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg dc402c7ac57241dc8904d62256996430 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 726.961193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc402c7ac57241dc8904d62256996430 [ 727.240953] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 727.242838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 2f29c5e424234dd0af66c12d5bfe4239 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.251290] env[62109]: DEBUG nova.compute.manager [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Received event network-changed-2f48948e-41a3-4548-a9be-24230ffff871 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 727.251470] env[62109]: DEBUG nova.compute.manager [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Refreshing instance network info cache due to event network-changed-2f48948e-41a3-4548-a9be-24230ffff871. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 727.251680] env[62109]: DEBUG oslo_concurrency.lockutils [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] Acquiring lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.251813] env[62109]: DEBUG oslo_concurrency.lockutils [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] Acquired lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.251963] env[62109]: DEBUG nova.network.neutron [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Refreshing network info cache for port 2f48948e-41a3-4548-a9be-24230ffff871 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.252361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] Expecting reply to msg 52cbbe2278b14bae8c04dedb0b228d7e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.259033] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52cbbe2278b14bae8c04dedb0b228d7e [ 727.281520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f29c5e424234dd0af66c12d5bfe4239 [ 727.399935] env[62109]: ERROR nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
[ 727.399935] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.399935] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.399935] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.399935] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.399935] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.399935] env[62109]: ERROR nova.compute.manager raise self.value [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.399935] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.399935] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.399935] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.400448] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.400448] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.400448] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
[ 727.400448] env[62109]: ERROR nova.compute.manager [ 727.400448] env[62109]: Traceback (most recent call last): [ 727.400448] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.400448] env[62109]: listener.cb(fileno) [ 727.400448] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.400448] env[62109]: result = function(*args, **kwargs) [ 727.400448] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.400448] env[62109]: return func(*args, **kwargs) [ 727.400448] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.400448] env[62109]: raise e [ 727.400448] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.400448] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 727.400448] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.400448] env[62109]: created_port_ids = self._update_ports_for_instance( [ 727.400448] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.400448] env[62109]: with excutils.save_and_reraise_exception(): [ 727.400448] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.400448] env[62109]: self.force_reraise() [ 727.400448] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.400448] env[62109]: raise self.value [ 727.400448] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.400448] env[62109]: updated_port = self._update_port( [ 727.400448] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.400448] env[62109]: _ensure_no_port_binding_failure(port) [ 727.400448] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.400448] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.401263] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
[ 727.401263] env[62109]: Removing descriptor: 19 [ 727.405305] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.407981] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 3a2cd6d5307b4ea4b9f79e58fa1265ac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.424590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a2cd6d5307b4ea4b9f79e58fa1265ac [ 727.429381] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.453733] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.453973] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.454127] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.454304] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Flavor pref 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.454446] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.454587] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.454786] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.454931] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 727.455156] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.455232] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.455394] env[62109]: DEBUG nova.virt.hardware [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.456415] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d81f89-c952-4733-b525-44c00c0a41b4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.465687] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe6eace-2738-441b-9ef7-ebcc3da25f16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.479663] env[62109]: ERROR nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
[ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Traceback (most recent call last): [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] yield resources [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.driver.spawn(context, instance, image_meta, [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] vm_ref = self.build_virtual_machine(instance, [ 727.479663] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] for vif in network_info: [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return self._sync_wrapper(fn, *args, **kwargs) [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.wait() [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self[:] = self._gt.wait() [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return self._exit_event.wait() [ 727.480060] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 727.480060] env[62109]: ERROR 
nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] current.throw(*self._exc) [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] result = function(*args, **kwargs) [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return func(*args, **kwargs) [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise e [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] nwinfo = self.network_api.allocate_for_instance( [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] created_port_ids = self._update_ports_for_instance( [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] with excutils.save_and_reraise_exception(): [ 727.480417] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.force_reraise() [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise self.value [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] updated_port = self._update_port( [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] _ensure_no_port_binding_failure(port) [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise exception.PortBindingFailed(port_id=port['id']) [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. [ 727.480768] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] [ 727.480768] env[62109]: INFO nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Terminating instance [ 727.481864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquiring lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.763401] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.774143] env[62109]: DEBUG nova.network.neutron [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.851807] env[62109]: DEBUG nova.network.neutron [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.852378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] Expecting reply to msg e539c84178e3498da06c8c7179fdebb9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.864365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e539c84178e3498da06c8c7179fdebb9 [ 727.910368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.910872] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 727.912567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d1d13f1952fd4effb47c63335c53c970 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.913567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.636s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.915177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 2bd7067314104248a823fd72f075c8d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 727.950308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bd7067314104248a823fd72f075c8d0 [ 727.950994] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1d13f1952fd4effb47c63335c53c970 [ 728.117019] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.117266] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.117418] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 728.117542] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 728.118181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 03811cef1e844869b30027c6ad3943cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.133151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03811cef1e844869b30027c6ad3943cd [ 728.354866] env[62109]: DEBUG oslo_concurrency.lockutils [req-cf1db7d2-3b42-4746-bddc-52616311a135 req-2fbc2ca3-aebf-4540-bee0-0546065d05ec service nova] Releasing lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.355291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquired lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.355475] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 728.356101] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg f65ef3419b634328b290b880007c6d02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.364027] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f65ef3419b634328b290b880007c6d02 [ 728.418366] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.419034] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg ad5e4c96dd204b0fb486e99f63c0a5d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.422819] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 728.422987] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 728.430613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad5e4c96dd204b0fb486e99f63c0a5d2 [ 728.486795] env[62109]: DEBUG nova.policy [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cc44e823e34c32bc9ad3c68f51c043', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118f460d1ed848db9ce4526a46c64fdd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 728.621009] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 728.621202] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 728.621336] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 728.621460] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 728.621580] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 728.621699] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 728.621924] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622038] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622180] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622320] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622454] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622591] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.622847] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 728.622847] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.623176] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 5d2aca2250024d73bed5ca3927a97cd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.633191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d2aca2250024d73bed5ca3927a97cd2 [ 728.786960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb590ded-2eea-41f4-bf45-0d724d807261 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.790233] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Successfully created port: 7b566737-b743-4fbb-a685-3882ecc657df {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.796279] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0fd519-820c-43a6-82c0-a8ce96d3166b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.826981] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5839c5ea-9e07-4b89-8748-50cbcf7745f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.833979] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99515e6-5323-4b89-8aa2-952d5d35414c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.846926] env[62109]: DEBUG nova.compute.provider_tree [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.847471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 7ef4c754da5f43678c9bbb2134e4e215 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.854989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ef4c754da5f43678c9bbb2134e4e215 [ 728.872508] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 728.923356] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 728.925149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 5990477813fa40cabdb82333eb9f5284 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.938088] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.941573] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 92aa3d8e8db84e7d9f6c9f2df2a13329 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 728.957815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92aa3d8e8db84e7d9f6c9f2df2a13329 [ 728.961368] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5990477813fa40cabdb82333eb9f5284 [ 729.130033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.277209] env[62109]: DEBUG nova.compute.manager [req-d5a4608f-c421-439f-aa78-97a168c69af4 req-98001538-1948-4e18-a883-0a7aeef231b1 service nova] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Received event network-vif-deleted-2f48948e-41a3-4548-a9be-24230ffff871 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 729.350175] env[62109]: DEBUG nova.scheduler.client.report [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.353192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg bd755585965844d8ac72978499d2695a in queue reply_7522b64acfeb4981b1f36928b040d568 
[ 729.365646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd755585965844d8ac72978499d2695a [ 729.439885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d12ee5194c0248cf8c8929fee66e4910 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 729.448784] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Releasing lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.449216] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 729.449410] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 729.449682] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bdb86df-f061-4ea7-ab1b-a53a236fe958 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.458474] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5577cf-c25b-46c5-904e-a1baebf2c92a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.483124] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f8e35f3-4b35-449c-9e60-1e0624f41cd2 could not be found. [ 729.483350] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.483531] env[62109]: INFO nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 729.483770] env[62109]: DEBUG oslo.service.loopingcall [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.484363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d12ee5194c0248cf8c8929fee66e4910 [ 729.484755] env[62109]: DEBUG nova.compute.manager [-] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.484849] env[62109]: DEBUG nova.network.neutron [-] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.498906] env[62109]: DEBUG nova.network.neutron [-] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.499472] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c2281a34bf14816a3ee4f3c80954d5b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 729.506199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c2281a34bf14816a3ee4f3c80954d5b [ 729.856083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.942s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.856780] env[62109]: ERROR nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. 
[ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] Traceback (most recent call last): [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.driver.spawn(context, instance, image_meta, [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] vm_ref = self.build_virtual_machine(instance, [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.856780] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] for vif in network_info: [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return self._sync_wrapper(fn, *args, **kwargs) [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.wait() [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self[:] = self._gt.wait() [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return self._exit_event.wait() [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] current.throw(*self._exc) [ 729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
729.857092] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] result = function(*args, **kwargs) [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] return func(*args, **kwargs) [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise e [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] nwinfo = self.network_api.allocate_for_instance( [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] created_port_ids = self._update_ports_for_instance( [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] with excutils.save_and_reraise_exception(): [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] self.force_reraise() [ 729.857434] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise self.value [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] updated_port = self._update_port( [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] _ensure_no_port_binding_failure(port) [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] raise exception.PortBindingFailed(port_id=port['id']) [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] nova.exception.PortBindingFailed: Binding failed for 
port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. [ 729.857772] env[62109]: ERROR nova.compute.manager [instance: 26a287d7-4602-4d83-8828-41870a49c343] [ 729.857772] env[62109]: DEBUG nova.compute.utils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 729.858808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.774s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.860779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 2b6fd1b8b9824a1f9b31c9cff9e290eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 729.862497] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. 
[ 729.862497] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.862497] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.862497] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.862497] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.862497] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.862497] env[62109]: ERROR nova.compute.manager raise self.value [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.862497] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 729.862497] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.862497] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 729.862969] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.862969] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 729.862969] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. 
[ 729.862969] env[62109]: ERROR nova.compute.manager [ 729.862969] env[62109]: Traceback (most recent call last): [ 729.862969] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 729.862969] env[62109]: listener.cb(fileno) [ 729.862969] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.862969] env[62109]: result = function(*args, **kwargs) [ 729.862969] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 729.862969] env[62109]: return func(*args, **kwargs) [ 729.862969] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.862969] env[62109]: raise e [ 729.862969] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.862969] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 729.862969] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.862969] env[62109]: created_port_ids = self._update_ports_for_instance( [ 729.862969] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.862969] env[62109]: with excutils.save_and_reraise_exception(): [ 729.862969] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.862969] env[62109]: self.force_reraise() [ 729.862969] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.862969] env[62109]: raise self.value [ 729.862969] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.862969] env[62109]: updated_port = self._update_port( [ 729.862969] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.862969] env[62109]: _ensure_no_port_binding_failure(port) [ 729.862969] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.862969] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 729.863711] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. [ 729.863711] env[62109]: Removing descriptor: 19 [ 729.863711] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Build of instance 26a287d7-4602-4d83-8828-41870a49c343 was re-scheduled: Binding failed for port b9a39990-885d-406a-a2d2-338706184202, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 729.863833] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 729.864113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquiring lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.864214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Acquired lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.864366] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.864741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg cd79e1519c40490aa1aa7c06c88619bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 729.871191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd79e1519c40490aa1aa7c06c88619bf [ 729.892522] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b6fd1b8b9824a1f9b31c9cff9e290eb [ 729.942632] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 729.968908] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 729.969157] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 729.969312] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.969488] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 729.969627] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.969769] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 729.970146] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 729.970253] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 729.970381] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 729.970535] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 729.970760] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 729.972641] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed57d37-d6af-4890-907e-8bebb81d1cd6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.980255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fee29e3-0418-4c10-b203-92b8653fe968 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.993539] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. 
[ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Traceback (most recent call last): [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] yield resources [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.driver.spawn(context, instance, image_meta, [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] vm_ref = self.build_virtual_machine(instance, [ 729.993539] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] for vif in network_info: [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return self._sync_wrapper(fn, *args, **kwargs) [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.wait() [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self[:] = self._gt.wait() [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return self._exit_event.wait() [ 729.993936] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 729.993936] env[62109]: ERROR 
nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] current.throw(*self._exc) [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] result = function(*args, **kwargs) [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return func(*args, **kwargs) [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise e [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] nwinfo = self.network_api.allocate_for_instance( [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] created_port_ids = self._update_ports_for_instance( [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] with excutils.save_and_reraise_exception(): [ 729.994310] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.force_reraise() [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise self.value [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] updated_port = self._update_port( [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] _ensure_no_port_binding_failure(port) [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise exception.PortBindingFailed(port_id=port['id']) [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. [ 729.994684] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] [ 729.994684] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Terminating instance [ 729.995789] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.995927] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.996133] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.996541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg b4e9179c846742d3be8eff5d2773e5a0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 730.001803] env[62109]: DEBUG nova.network.neutron [-] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.001917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 985bf3eb6adc4e51977e64c8e938cdd6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 730.004701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4e9179c846742d3be8eff5d2773e5a0 [ 730.009598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 985bf3eb6adc4e51977e64c8e938cdd6 [ 730.388570] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.485291] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.485784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg ec452235c6724078b2ddabd5fcb76e96 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 730.496050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec452235c6724078b2ddabd5fcb76e96 [ 730.503313] env[62109]: INFO nova.compute.manager [-] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Took 1.02 seconds to deallocate network for instance. [ 730.507628] env[62109]: DEBUG nova.compute.claims [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 730.507804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.514519] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.590286] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.590812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 3e720fc28a5c4f9db8a04f1f1339a077 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 730.598971] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e720fc28a5c4f9db8a04f1f1339a077 [ 730.710823] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ea8c80-f9d8-462b-ac0f-ccd73fac6ac8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.719002] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980dee7e-08f1-4656-884a-dca6fad79813 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.750058] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b092b6-1200-4302-9b80-58fe6a5a40dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.756902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea07d70a-88b7-427d-a7a5-7a14f90381b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.786043] env[62109]: DEBUG nova.compute.provider_tree [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.788417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 824f0b103f87479181c54c6942e9867f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 730.796152] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 824f0b103f87479181c54c6942e9867f [ 730.988706] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Releasing lock "refresh_cache-26a287d7-4602-4d83-8828-41870a49c343" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.989109] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 730.989357] env[62109]: DEBUG nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.989529] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.005096] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.005685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg ada084448ddb422eae918431220a570b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.014839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ada084448ddb422eae918431220a570b [ 731.094631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.095068] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 731.095277] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 731.095689] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e5a5f42-b212-403f-a530-a9d6fd872e66 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.104196] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b30712-ab84-4fad-b057-64ba634c8285 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.124586] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9a6d28b-52f8-4636-886a-c74f0900e761 could not be found. [ 731.124740] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 731.124912] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Took 0.03 seconds to destroy the instance on the hypervisor. [ 731.125137] env[62109]: DEBUG oslo.service.loopingcall [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.125341] env[62109]: DEBUG nova.compute.manager [-] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.125432] env[62109]: DEBUG nova.network.neutron [-] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.138826] env[62109]: DEBUG nova.network.neutron [-] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.139275] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 58ba99b6d1194f259d84126d5f28fcd5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.145257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58ba99b6d1194f259d84126d5f28fcd5 [ 731.291388] env[62109]: DEBUG nova.scheduler.client.report [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.294747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 84f656932c9e45758a7522d12a774511 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.306663] env[62109]: DEBUG nova.compute.manager [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Received event network-changed-7b566737-b743-4fbb-a685-3882ecc657df {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 731.306663] env[62109]: DEBUG nova.compute.manager [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Refreshing instance network info cache due to event network-changed-7b566737-b743-4fbb-a685-3882ecc657df. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 731.306663] env[62109]: DEBUG oslo_concurrency.lockutils [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] Acquiring lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.306663] env[62109]: DEBUG oslo_concurrency.lockutils [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] Acquired lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.306663] env[62109]: DEBUG nova.network.neutron [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Refreshing network info cache for port 7b566737-b743-4fbb-a685-3882ecc657df {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 731.306846] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] Expecting reply to msg ca501333a6d3468894e39f0184645bbb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.310706] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84f656932c9e45758a7522d12a774511 [ 731.311762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca501333a6d3468894e39f0184645bbb [ 731.508348] env[62109]: DEBUG nova.network.neutron [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.508928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 8b4d4abb09944dec90ac672896e80946 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.517235] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b4d4abb09944dec90ac672896e80946 [ 731.642239] env[62109]: DEBUG nova.network.neutron [-] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.642680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bbc13c2a02ec4138a11fd6a14db9c0a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.651782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbc13c2a02ec4138a11fd6a14db9c0a1 [ 731.799193] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.938s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.799193] env[62109]: ERROR nova.compute.manager [None 
req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Traceback (most recent call last): [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.driver.spawn(context, instance, image_meta, [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.799193] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] vm_ref = self.build_virtual_machine(instance, [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] for vif in network_info: [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return self._sync_wrapper(fn, *args, **kwargs) [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.wait() [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self[:] = self._gt.wait() [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return self._exit_event.wait() [ 731.799571] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] current.throw(*self._exc) [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] result = function(*args, **kwargs) [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] return func(*args, **kwargs) [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise e [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] nwinfo = self.network_api.allocate_for_instance( [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] created_port_ids = self._update_ports_for_instance( [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 731.799974] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] with excutils.save_and_reraise_exception(): [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] self.force_reraise() [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise self.value [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] updated_port = self._update_port( [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] _ensure_no_port_binding_failure(port) [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: 
dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] raise exception.PortBindingFailed(port_id=port['id']) [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] nova.exception.PortBindingFailed: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. [ 731.800398] env[62109]: ERROR nova.compute.manager [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] [ 731.800775] env[62109]: DEBUG nova.compute.utils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 731.800775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.261s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.800935] env[62109]: INFO nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.802526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c3282eaf67be4983ad89bab843d08710 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.803696] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Build of instance dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09 was re-scheduled: Binding failed for port 89293cc2-155b-47ed-b5ba-f18c8b302e91, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 731.804178] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 731.804413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquiring lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.804558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Acquired lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.804713] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 731.805071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 0d50289cbc4048be97a88ff3ef0914e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.812829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d50289cbc4048be97a88ff3ef0914e2 [ 731.820446] env[62109]: DEBUG nova.network.neutron [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.837878] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3282eaf67be4983ad89bab843d08710 [ 731.887784] env[62109]: DEBUG nova.network.neutron [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.888178] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] Expecting reply to msg 0c7234450b1048088497f687f6890c23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 731.895860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c7234450b1048088497f687f6890c23 [ 732.012048] env[62109]: INFO nova.compute.manager [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] [instance: 26a287d7-4602-4d83-8828-41870a49c343] Took 1.02 seconds to deallocate network for instance. [ 732.014057] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg 0b6f7ab596804c269da14e7acf905ef0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 732.049140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b6f7ab596804c269da14e7acf905ef0 [ 732.144558] env[62109]: INFO nova.compute.manager [-] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Took 1.02 seconds to deallocate network for instance. [ 732.147022] env[62109]: DEBUG nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 732.147201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.309929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d444eab3595d49259e2895d0b58853da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 732.318441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d444eab3595d49259e2895d0b58853da [ 732.329634] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 732.390702] env[62109]: DEBUG oslo_concurrency.lockutils [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] Releasing lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.390964] env[62109]: DEBUG nova.compute.manager [req-d26b11bb-ed84-4d18-a2c3-125d57c65d06 req-4ac36054-911e-4112-9388-bad6f9da2ea4 service nova] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Received event network-vif-deleted-7b566737-b743-4fbb-a685-3882ecc657df {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 732.402667] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.402667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 77c587fcbb2d4ec5a22f0ccc6957c607 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 732.411025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77c587fcbb2d4ec5a22f0ccc6957c607 [ 732.519331] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg d3651fd9106e4fe1a83a5f97e75aaf30 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 732.549628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3651fd9106e4fe1a83a5f97e75aaf30 [ 732.904245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Releasing lock "refresh_cache-dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.904477] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 732.904651] env[62109]: DEBUG nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 732.904809] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 732.919238] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 732.919778] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg aaa71b3c85194eb3ad92c90e025cbe45 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 732.926532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaa71b3c85194eb3ad92c90e025cbe45 [ 733.043762] env[62109]: INFO nova.scheduler.client.report [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Deleted allocations for instance 26a287d7-4602-4d83-8828-41870a49c343 [ 733.049933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Expecting reply to msg d1c41c6cf6c94e5f8344f39a0af2e9e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.065027] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1c41c6cf6c94e5f8344f39a0af2e9e7 [ 733.238367] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32eab1e9-c788-4652-9491-a8052e597d58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.245722] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380d9059-a649-480f-b4f3-850dd2eaca14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.274373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c029682-6f83-49b2-8306-6c9709ce136e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.280980] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f060b3c2-3fec-45ce-b625-bc2c38f04256 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.295149] env[62109]: DEBUG nova.compute.provider_tree [None 
req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.295626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c17d7894bf16438b82a6088cce246c87 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.303051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c17d7894bf16438b82a6088cce246c87 [ 733.423174] env[62109]: DEBUG nova.network.neutron [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.423655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 878ce875fe0741d090b2a9b95b6b19ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.432088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 878ce875fe0741d090b2a9b95b6b19ad [ 733.553291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8841364b-cbc6-4765-9f39-b253815bd2e0 tempest-ServerRescueNegativeTestJSON-663683599 tempest-ServerRescueNegativeTestJSON-663683599-project-member] Lock "26a287d7-4602-4d83-8828-41870a49c343" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.556s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.553893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg ba8ce9caa736418e9b61e6eb4f135795 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.564133] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba8ce9caa736418e9b61e6eb4f135795 [ 733.798732] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.801330] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 
tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 4991e4ab92d84d4fb74271ad5fa45071 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.812074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4991e4ab92d84d4fb74271ad5fa45071 [ 733.925466] env[62109]: INFO nova.compute.manager [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] [instance: dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09] Took 1.02 seconds to deallocate network for instance. [ 733.927229] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 54199318112f428380be7c720e463db2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 733.957471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54199318112f428380be7c720e463db2 [ 734.055843] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.057623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 47847c4e01e74035a75c00e2c364b7de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.104545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47847c4e01e74035a75c00e2c364b7de [ 734.303603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.304158] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 734.305790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg ab772fcaac5445978b7ba45718f83b75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.306791] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.852s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.308397] env[62109]: INFO nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.309679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg aea835b36ea54e6cb10a0a5666fafc77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.343998] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aea835b36ea54e6cb10a0a5666fafc77 [ 734.348104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab772fcaac5445978b7ba45718f83b75 [ 734.431430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg d507eecbdf944dbc960c0b53063aa948 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.493072] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d507eecbdf944dbc960c0b53063aa948 [ 734.574059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.822957] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.823632] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 075c104e049344dfa781600b6592b649 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.825672] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 9e477bf848c744d7805e3821e35f4d7d in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 734.826429] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 734.826587] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 734.832894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e477bf848c744d7805e3821e35f4d7d [ 734.835043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 075c104e049344dfa781600b6592b649 [ 734.868717] env[62109]: DEBUG nova.policy [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cc44e823e34c32bc9ad3c68f51c043', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118f460d1ed848db9ce4526a46c64fdd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 734.963174] env[62109]: INFO nova.scheduler.client.report [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Deleted allocations for instance dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09 [ 734.969249] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Expecting reply to msg 635d3d9a25e6412fadee4894af09314c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 734.978704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635d3d9a25e6412fadee4894af09314c [ 735.218091] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Successfully created port: c2098700-1246-4d9f-9eff-78376ba87e82 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.329793] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 735.331596] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d5c7e1c7e69b4c67a9c17960d0acf89b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 735.378747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5c7e1c7e69b4c67a9c17960d0acf89b [ 735.470757] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a5b9c9eb-e624-4ab4-9dd9-440374e69d57 tempest-ServerAddressesTestJSON-1380179577 tempest-ServerAddressesTestJSON-1380179577-project-member] Lock "dcdb35dd-3c01-4ba6-9a94-9ef3d49caa09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.477s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.471369] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 2a26dd8796114e8fb5e477eba9256a02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 735.481621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a26dd8796114e8fb5e477eba9256a02 [ 735.801535] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb8ab95-f722-4944-a062-607fc4e41017 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.809688] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3337869b-ec20-49eb-90be-cada75b18123 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.841311] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg e5c8a605dfe34eb5ad88eba7211b3ffe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 735.843023] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a0a4cc-57c7-4498-a99d-8bd27e66b2b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.850613] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd70713-c2df-47d8-a4b9-ac868cd4bccd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.863728] env[62109]: DEBUG nova.compute.provider_tree [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.864264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg da27c96a9fbc4bbd9a40425c735deec8 
in queue reply_7522b64acfeb4981b1f36928b040d568 [ 735.874929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da27c96a9fbc4bbd9a40425c735deec8 [ 735.886144] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5c8a605dfe34eb5ad88eba7211b3ffe [ 735.973529] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 735.975906] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 8e9589d9a83d47e0a63ca762864bc522 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.015538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e9589d9a83d47e0a63ca762864bc522 [ 736.096416] env[62109]: DEBUG nova.compute.manager [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Received event network-changed-c2098700-1246-4d9f-9eff-78376ba87e82 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 736.096672] env[62109]: DEBUG nova.compute.manager [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Refreshing instance network info cache due to event network-changed-c2098700-1246-4d9f-9eff-78376ba87e82. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 736.096969] env[62109]: DEBUG oslo_concurrency.lockutils [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] Acquiring lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.097165] env[62109]: DEBUG oslo_concurrency.lockutils [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] Acquired lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.097516] env[62109]: DEBUG nova.network.neutron [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Refreshing network info cache for port c2098700-1246-4d9f-9eff-78376ba87e82 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.098385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] Expecting reply to msg e7b1c64698d54be6a0a1044a50c51dab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.106467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7b1c64698d54be6a0a1044a50c51dab [ 736.347676] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 736.372461] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.374893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 1982efb3c95b4230ace3d481c35bd231 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.381030] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
[ 736.381030] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.381030] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.381030] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.381030] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.381030] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.381030] env[62109]: ERROR nova.compute.manager raise self.value [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.381030] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 736.381030] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.381030] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 736.381513] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.381513] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 736.381513] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
[ 736.381513] env[62109]: ERROR nova.compute.manager [ 736.381513] env[62109]: Traceback (most recent call last): [ 736.381513] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 736.381513] env[62109]: listener.cb(fileno) [ 736.381513] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.381513] env[62109]: result = function(*args, **kwargs) [ 736.381513] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.381513] env[62109]: return func(*args, **kwargs) [ 736.381513] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.381513] env[62109]: raise e [ 736.381513] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.381513] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 736.381513] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.381513] env[62109]: created_port_ids = self._update_ports_for_instance( [ 736.381513] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.381513] env[62109]: with excutils.save_and_reraise_exception(): [ 736.381513] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.381513] env[62109]: self.force_reraise() [ 736.381513] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.381513] env[62109]: raise self.value [ 736.381513] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.381513] env[62109]: updated_port = self._update_port( [ 736.381513] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.381513] env[62109]: _ensure_no_port_binding_failure(port) [ 736.381513] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.381513] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 736.382792] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
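The traceback above ends in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure after Neutron reported the port binding as failed. As a reading aid only, here is a minimal, hypothetical Python sketch of that pattern: the names mirror the log, but the port-dict shape (binding:vif_type == 'binding_failed') and the stripped-down save_and_reraise_exception stand-in are assumptions, and this is not Nova's actual code.

```python
# Hypothetical sketch of the failure pattern shown in the traceback above.
# Assumptions: port dicts carry Neutron's 'binding:vif_type' key, and the
# context manager below is a simplified stand-in for
# oslo_utils.excutils.save_and_reraise_exception().
import contextlib


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information."
        )


def _ensure_no_port_binding_failure(port):
    # A failed binding surfaces as binding:vif_type == 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])


@contextlib.contextmanager
def save_and_reraise_exception():
    # Simplified stand-in: give callers a place to log or clean up on
    # failure, then re-raise the original exception unchanged.
    try:
        yield
    except Exception:
        # ... logging / cleanup would go here ...
        raise


def update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        with save_and_reraise_exception():
            _ensure_no_port_binding_failure(port)
            created_port_ids.append(port["id"])
    return created_port_ids
```

Passing a port dict whose binding:vif_type is "binding_failed" through update_ports_for_instance reproduces the shape of the error logged here: the check raises, the context manager re-raises, and the caller sees PortBindingFailed, which is why the build is aborted and the instance later terminated or re-scheduled.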
[ 736.382792] env[62109]: Removing descriptor: 19 [ 736.388378] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.388606] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.388754] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.388950] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.389109] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.389252] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.389445] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.389592] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.389748] env[62109]: 
DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.389898] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.390056] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.391222] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936ce145-798b-4f39-96ca-b35d031032e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.393800] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1982efb3c95b4230ace3d481c35bd231 [ 736.399886] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e916310c-d59b-48c2-8685-d641b92d548e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.414074] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
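The nova.virt.hardware DEBUG lines above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, limits of 65536 sockets/cores/threads, and a single possible topology, VirtCPUTopology(cores=1,sockets=1,threads=1). A rough, hypothetical sketch of that enumeration (not Nova's actual _get_possible_cpu_topologies, which also weighs flavor and image preferences) is:

```python
# Hypothetical sketch of the topology enumeration logged above: list every
# (sockets, cores, threads) triple whose product equals the vCPU count and
# stays within the given limits. With 1 vCPU the only answer is (1, 1, 1).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    topologies = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


print(possible_cpu_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```

This matches the "Build topologies for 1 vcpu(s) 1:1:1" and "Got 1 possible topologies" entries: with a single vCPU and effectively unlimited limits, only the 1:1:1 layout exists, so it is also the sorted and preferred topology.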
[ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Traceback (most recent call last): [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] yield resources [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.driver.spawn(context, instance, image_meta, [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] vm_ref = self.build_virtual_machine(instance, [ 736.414074] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] for vif in network_info: [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return self._sync_wrapper(fn, *args, **kwargs) [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.wait() [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self[:] = self._gt.wait() [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return self._exit_event.wait() [ 736.414492] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 736.414492] env[62109]: ERROR 
nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] current.throw(*self._exc) [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] result = function(*args, **kwargs) [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return func(*args, **kwargs) [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise e [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] nwinfo = self.network_api.allocate_for_instance( [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] created_port_ids = self._update_ports_for_instance( [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] with excutils.save_and_reraise_exception(): [ 736.414923] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.force_reraise() [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise self.value [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] updated_port = self._update_port( [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] _ensure_no_port_binding_failure(port) [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise exception.PortBindingFailed(port_id=port['id']) [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. [ 736.415357] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] [ 736.415357] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Terminating instance [ 736.416734] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.497203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.621235] env[62109]: DEBUG nova.network.neutron [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.721583] env[62109]: DEBUG nova.network.neutron [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.722061] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] Expecting reply to msg 4c231c22c60b4209a742ee7ed0509fd9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.731548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c231c22c60b4209a742ee7ed0509fd9 [ 736.883041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.883041] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 736.883041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg a9f618180dc449c4b687e2df0b3478c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.883041] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.184s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.884708] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 86f7e16a2aef4bceaff5ea0f535f7a87 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 736.923050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9f618180dc449c4b687e2df0b3478c2 [ 736.941341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f7e16a2aef4bceaff5ea0f535f7a87 [ 737.224081] env[62109]: DEBUG oslo_concurrency.lockutils [req-fe03b649-b304-4bf3-ab0a-17309f0edb15 req-5881ec3e-825f-449d-b940-e52f0310e1ac service nova] Releasing lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.224468] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.224639] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.225066] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 0eaa5cfe518a424fbccd8d75a9901022 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 737.234285] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0eaa5cfe518a424fbccd8d75a9901022 [ 737.395348] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.396056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 
29cbe0c22903405fb1aecbfbfc66d4e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 737.397762] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 737.399371] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.413499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29cbe0c22903405fb1aecbfbfc66d4e7 [ 737.464750] env[62109]: DEBUG nova.policy [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cc44e823e34c32bc9ad3c68f51c043', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118f460d1ed848db9ce4526a46c64fdd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 737.748872] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.795074] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Successfully created port: 2051acbf-f948-4fb9-b38a-44883022ff72 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.798970] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910aff52-9a90-4ccd-b898-f703c025c365 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.806226] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e4e3f6-7bd1-4d4c-a50d-28d8f9d82990 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.835983] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60780f6-bbd1-4acb-a2bb-85d2b0f14794 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.839223] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.839711] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg da94a6c4ea4047bea31c41a851e55d47 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 737.846806] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc69cc3-be30-4000-b188-e0839626eb9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.849368] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da94a6c4ea4047bea31c41a851e55d47 [ 737.859275] env[62109]: DEBUG nova.compute.provider_tree [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.859760] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 46c394f78a8b4ee19f908eb71ac68c18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 737.868329] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46c394f78a8b4ee19f908eb71ac68c18 [ 737.902624] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Start building block device 
mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 737.904287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg e06411eebf834951ad92e2091b768ae3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 737.945498] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e06411eebf834951ad92e2091b768ae3 [ 738.123513] env[62109]: DEBUG nova.compute.manager [req-941e6645-c4f2-4cb2-b20f-6eb56bac5c6b req-81996f71-54a4-48ce-a4de-106a570f6e8e service nova] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Received event network-vif-deleted-c2098700-1246-4d9f-9eff-78376ba87e82 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 738.342408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.342848] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 738.343046] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 738.343340] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc56029e-f22b-435c-adaf-6e114e9c8fbf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.351965] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45c305b-1541-4448-b8b5-c373a348e886 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.366596] env[62109]: DEBUG nova.scheduler.client.report [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.368826] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting 
reply to msg ef51d2b9bb674fa0a20326e242cf5060 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.380514] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4 could not be found. [ 738.380696] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 738.380869] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 738.381145] env[62109]: DEBUG oslo.service.loopingcall [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.381371] env[62109]: DEBUG nova.compute.manager [-] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.381464] env[62109]: DEBUG nova.network.neutron [-] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 738.391489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef51d2b9bb674fa0a20326e242cf5060 [ 738.401552] env[62109]: DEBUG nova.network.neutron [-] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.402029] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 875cc20799de492b8a16011a70f750d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.408577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 97d9d8f884dd4bcc853c9fea81bb14c3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.409651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 875cc20799de492b8a16011a70f750d6 [ 738.441938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97d9d8f884dd4bcc853c9fea81bb14c3 [ 738.871270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.871922] env[62109]: ERROR nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Traceback (most recent call last): [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.driver.spawn(context, instance, image_meta, [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] vm_ref = self.build_virtual_machine(instance, [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.871922] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] for vif in network_info: [ 738.872384] env[62109]: ERROR nova.compute.manager 
[instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self._sync_wrapper(fn, *args, **kwargs) [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.wait() [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self[:] = self._gt.wait() [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self._exit_event.wait() [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] result = hub.switch() [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 738.872384] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return self.greenlet.switch() [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] result = function(*args, **kwargs) [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] return func(*args, **kwargs) [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise e [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] nwinfo = self.network_api.allocate_for_instance( [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] created_port_ids = self._update_ports_for_instance( [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] with excutils.save_and_reraise_exception(): [ 738.872797] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] self.force_reraise() [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise self.value [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] updated_port = self._update_port( [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] _ensure_no_port_binding_failure(port) [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] raise exception.PortBindingFailed(port_id=port['id']) [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] nova.exception.PortBindingFailed: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. [ 738.873267] env[62109]: ERROR nova.compute.manager [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] [ 738.873640] env[62109]: DEBUG nova.compute.utils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 738.874040] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.646s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.875510] env[62109]: INFO nova.compute.claims [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.877205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg b27b775e61e94478843cbd92d2941b77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.878528] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Build of instance 29715a53-7a71-4708-b522-e678fe5bd6a9 was re-scheduled: Binding failed for port a59f3c1f-3c1e-42a5-9305-85d22fee72e2, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 738.878959] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 738.879174] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquiring lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.879310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Acquired lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.879458] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.879824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg a62403d17cb94a77acf59b328344f0f4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.896306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received 
RPC response for msg a62403d17cb94a77acf59b328344f0f4 [ 738.903690] env[62109]: DEBUG nova.network.neutron [-] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.904178] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 68aaaf59edb244dd8d4d822f4e6c7c0d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 738.912180] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 738.929924] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b27b775e61e94478843cbd92d2941b77 [ 738.933928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68aaaf59edb244dd8d4d822f4e6c7c0d [ 738.939154] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.939383] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.939528] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.939716] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.939854] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.939992] env[62109]: DEBUG nova.virt.hardware [None 
req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.940203] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.940351] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.940505] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.940656] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.940816] env[62109]: DEBUG nova.virt.hardware [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.941884] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3207228a-801a-4113-8c0a-dfd587cc604e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.950168] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3849b8-2eb2-4dea-bb3d-a5b173933dc5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.066104] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. 
[ 739.066104] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.066104] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.066104] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.066104] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.066104] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.066104] env[62109]: ERROR nova.compute.manager raise self.value [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.066104] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 739.066104] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.066104] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 739.066619] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.066619] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 739.066619] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. 
[ 739.066619] env[62109]: ERROR nova.compute.manager [ 739.066619] env[62109]: Traceback (most recent call last): [ 739.066619] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 739.066619] env[62109]: listener.cb(fileno) [ 739.066619] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.066619] env[62109]: result = function(*args, **kwargs) [ 739.066619] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 739.066619] env[62109]: return func(*args, **kwargs) [ 739.066619] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.066619] env[62109]: raise e [ 739.066619] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.066619] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 739.066619] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.066619] env[62109]: created_port_ids = self._update_ports_for_instance( [ 739.066619] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.066619] env[62109]: with excutils.save_and_reraise_exception(): [ 739.066619] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.066619] env[62109]: self.force_reraise() [ 739.066619] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.066619] env[62109]: raise self.value [ 739.066619] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.066619] env[62109]: updated_port = self._update_port( [ 739.066619] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.066619] env[62109]: _ensure_no_port_binding_failure(port) [ 739.066619] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.066619] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 739.067724] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. [ 739.067724] env[62109]: Removing descriptor: 19 [ 739.067724] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. 
[ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Traceback (most recent call last): [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] yield resources [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.driver.spawn(context, instance, image_meta, [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 739.067724] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] vm_ref = self.build_virtual_machine(instance, [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] vif_infos = vmwarevif.get_vif_info(self._session, [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] for vif in network_info: [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return self._sync_wrapper(fn, *args, **kwargs) [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.wait() [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self[:] = self._gt.wait() [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return self._exit_event.wait() [ 739.068272] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 739.068863] env[62109]: ERROR 
nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] result = hub.switch() [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return self.greenlet.switch() [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] result = function(*args, **kwargs) [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return func(*args, **kwargs) [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise e [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] nwinfo = self.network_api.allocate_for_instance( [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.068863] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] created_port_ids = self._update_ports_for_instance( [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] with excutils.save_and_reraise_exception(): [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.force_reraise() [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise self.value [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] updated_port = self._update_port( [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.069316] 
env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] _ensure_no_port_binding_failure(port) [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.069316] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise exception.PortBindingFailed(port_id=port['id']) [ 739.069803] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. [ 739.069803] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] [ 739.069803] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Terminating instance [ 739.069803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.069803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.070038] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.070350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 040dd4d14c53443fa6e0a0bf0fc94b3d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 739.077613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 040dd4d14c53443fa6e0a0bf0fc94b3d [ 739.383284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 3f92d2ef5b1f4c738b1925420bf66636 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 739.390928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f92d2ef5b1f4c738b1925420bf66636 [ 739.406005] env[62109]: INFO nova.compute.manager [-] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Took 1.02 seconds to deallocate network for instance. 
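The tracebacks above all end in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a failed Neutron binding into the PortBindingFailed that aborts the spawn. A minimal standalone sketch of that check, assuming the standard Neutron port dict shape in which a failed binding is reported as binding:vif_type = 'binding_failed'; the exception class and sample data below are illustrative, not Nova's code verbatim:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron leaves binding:vif_type == 'binding_failed' on a port it
        # could not bind; the compute manager raises instead of proceeding
        # to build VIF info for the instance.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from this trace would look roughly like this:
    failed_port = {'id': '2051acbf-f948-4fb9-b38a-44883022ff72',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # reproduces the "Binding failed for port ..." message above

Once that exception propagates out of _allocate_network_async, the compute manager terminates the instance and deallocates its (empty) network info, which is the sequence the log records next.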
[ 739.408074] env[62109]: DEBUG nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 739.408264] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.419916] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.533159] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.533968] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 1e2888106a0f4f1c826811903ce28603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 739.542104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e2888106a0f4f1c826811903ce28603 [ 739.589919] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.660266] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.660852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 7f6ce4117a444fa5a21f0153d623e2e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 739.669417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f6ce4117a444fa5a21f0153d623e2e7 [ 740.036678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Releasing lock "refresh_cache-29715a53-7a71-4708-b522-e678fe5bd6a9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.036923] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 740.037250] env[62109]: DEBUG nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.037321] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.053715] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.054294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg a5c215d1d1e44e109e1561376802bf53 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.061109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5c215d1d1e44e109e1561376802bf53 [ 740.162792] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.163564] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 740.163991] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 740.164467] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7a45ebd-63c4-4f10-a3df-0cd44583d08b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.171374] env[62109]: DEBUG nova.compute.manager [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Received event network-changed-2051acbf-f948-4fb9-b38a-44883022ff72 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 740.171725] env[62109]: DEBUG nova.compute.manager [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Refreshing instance network info cache due to event network-changed-2051acbf-f948-4fb9-b38a-44883022ff72. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 740.172105] env[62109]: DEBUG oslo_concurrency.lockutils [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] Acquiring lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.172475] env[62109]: DEBUG oslo_concurrency.lockutils [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] Acquired lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.172780] env[62109]: DEBUG nova.network.neutron [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Refreshing network info cache for port 2051acbf-f948-4fb9-b38a-44883022ff72 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.173364] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] Expecting reply to msg 28f9bce5a7954d4dba84ecfc604957f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.182845] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28f9bce5a7954d4dba84ecfc604957f2 [ 740.184802] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31246ab6-1d23-49cc-8740-348d39907511 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.209983] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9f77d364-928f-4595-9253-8bb216b9215b could not be found. [ 740.210250] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.210416] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 740.210651] env[62109]: DEBUG oslo.service.loopingcall [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.210868] env[62109]: DEBUG nova.compute.manager [-] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.210962] env[62109]: DEBUG nova.network.neutron [-] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.231079] env[62109]: DEBUG nova.network.neutron [-] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.231600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ecf44665c27b4f3999d8cec086647e9b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.240187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecf44665c27b4f3999d8cec086647e9b [ 740.380881] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4aec22-a82c-4d62-9a08-cf04eb7bbe31 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.391122] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c40cc9-269a-4c1c-9db5-6d11b9d6b394 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.427173] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62d23ec-7fcc-4e31-9454-2f8dc968f396 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.433054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be079c79-980d-4e81-b3b7-2232ccdbd3fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.446694] env[62109]: DEBUG nova.compute.provider_tree [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.447176] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d1e8fe82c35f4541990a89580a6439cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.459621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1e8fe82c35f4541990a89580a6439cd [ 740.557115] env[62109]: DEBUG nova.network.neutron [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.557647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 
tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 6c61ba8f34f144f4b7af50fef0617241 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.565324] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c61ba8f34f144f4b7af50fef0617241 [ 740.702773] env[62109]: DEBUG nova.network.neutron [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.734619] env[62109]: DEBUG nova.network.neutron [-] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.735095] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b77e6c7d5fc44c39bb6096579a08fecb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.753667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b77e6c7d5fc44c39bb6096579a08fecb [ 740.838625] env[62109]: DEBUG nova.network.neutron [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.838625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] Expecting reply to msg a059cad121b7461396dc71e6f06fecc5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.858822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a059cad121b7461396dc71e6f06fecc5 [ 740.949813] env[62109]: DEBUG nova.scheduler.client.report [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.952410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 15cd9faf809e4f2394637435c69953ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 740.963574] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15cd9faf809e4f2394637435c69953ef [ 741.059595] env[62109]: INFO nova.compute.manager [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] [instance: 29715a53-7a71-4708-b522-e678fe5bd6a9] Took 1.02 seconds to deallocate network for instance. 
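The "Inventory has not changed" record above carries the provider's full inventory. A short sketch of how those numbers become schedulable capacity, using the exact values from this log and the usual Placement rule capacity = (total - reserved) * allocation_ratio; this is an illustration, not the scheduler's code:

    # Inventory as reported for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Schedulable units: strip the reserved amount, then apply oversubscription.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, int(capacity(inv)))
    # VCPU 192
    # MEMORY_MB 196078
    # DISK_GB 400

With the 4.0 allocation ratio the 48 host vCPUs are advertised as 192 schedulable units, while memory and disk are not oversubscribed; max_unit (16 VCPU, 65530 MB, 124 GB) caps what any single instance may claim.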
[ 741.061661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 431b0f85947947e2a21ab073a73e41f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 741.118689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 431b0f85947947e2a21ab073a73e41f2 [ 741.237106] env[62109]: INFO nova.compute.manager [-] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Took 1.03 seconds to deallocate network for instance. [ 741.239338] env[62109]: DEBUG nova.compute.claims [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 741.239510] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.339874] env[62109]: DEBUG oslo_concurrency.lockutils [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] Releasing lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.340185] env[62109]: DEBUG nova.compute.manager [req-649381be-e649-4750-8e0c-da23f587c578 req-021372a8-718b-4afa-8a3c-524528bcd0dc service nova] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Received event network-vif-deleted-2051acbf-f948-4fb9-b38a-44883022ff72 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 741.398109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquiring lock "252b7e84-4f91-4078-a81c-392d622b6ce2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.398343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "252b7e84-4f91-4078-a81c-392d622b6ce2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.455661] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.456320] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 
tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 741.458024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg f3b00b60536b43e19b14563cf9627ad4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 741.459084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.522s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.461664] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 3db4052849db4cdeb39a00ed7a033328 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 741.515308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3b00b60536b43e19b14563cf9627ad4 [ 741.516563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3db4052849db4cdeb39a00ed7a033328 [ 741.566495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg 1a0bedc3da75416ca77ed4a8ae2fe377 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 741.600816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a0bedc3da75416ca77ed4a8ae2fe377 [ 741.964744] env[62109]: DEBUG nova.compute.utils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.965441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg dc9998ccdf734737852c9a2d27b20951 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 741.970197] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 741.970197] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 741.981410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc9998ccdf734737852c9a2d27b20951 [ 742.015555] env[62109]: DEBUG nova.policy [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 742.096192] env[62109]: INFO nova.scheduler.client.report [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Deleted allocations for instance 29715a53-7a71-4708-b522-e678fe5bd6a9 [ 742.104840] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Expecting reply to msg d5587be97fcb403b9d0cb644883984f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 742.132378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5587be97fcb403b9d0cb644883984f2 [ 742.384762] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Successfully created port: bd3d8b29-3381-4851-99c3-877677b6056f {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.419288] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbcf7a0-7fe9-44ef-935d-e06b4d344fa4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.428144] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac30bb5d-39f0-4a52-8f38-9551b2bf97c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.471000] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 742.472698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 127c147384cb46028d7d884ed71e2f53 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 742.475493] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdb3752-4a13-4a71-a478-69de57d43061 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.483950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fa5588-f5bd-4289-9816-84aa9d1b68ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.499538] env[62109]: DEBUG nova.compute.provider_tree [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.500292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 2083083ef7714c64b18ba029adedb477 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 742.510700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 127c147384cb46028d7d884ed71e2f53 [ 742.510700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2083083ef7714c64b18ba029adedb477 [ 742.609239] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8f8d7f0-f1b8-41ca-b637-5158f1da0102 tempest-ServerActionsTestOtherB-638853083 tempest-ServerActionsTestOtherB-638853083-project-member] Lock "29715a53-7a71-4708-b522-e678fe5bd6a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.944s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.610054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 35cb4b0f6de24202bbab6fa1977e2099 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 742.623112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35cb4b0f6de24202bbab6fa1977e2099 [ 742.977862] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d0c887a241f14c4e8e4a81819b0a311f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.006481] env[62109]: DEBUG nova.scheduler.client.report [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.009112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg b4230e42a8ef4782b82ad9e6ccf68197 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.010834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0c887a241f14c4e8e4a81819b0a311f [ 743.029592] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4230e42a8ef4782b82ad9e6ccf68197 [ 743.112147] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.113966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 30a52325ed7f40fca422c11d8b4e881b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.148348] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30a52325ed7f40fca422c11d8b4e881b [ 743.386448] env[62109]: DEBUG nova.compute.manager [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Received event network-changed-bd3d8b29-3381-4851-99c3-877677b6056f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 743.386641] env[62109]: DEBUG nova.compute.manager [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Refreshing instance network info cache due to event network-changed-bd3d8b29-3381-4851-99c3-877677b6056f. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 743.386857] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] Acquiring lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.386998] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] Acquired lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.387156] env[62109]: DEBUG nova.network.neutron [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Refreshing network info cache for port bd3d8b29-3381-4851-99c3-877677b6056f {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.387565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] Expecting reply to msg 1784d4e41aca45a29547dc1a21c5d4de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.394316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1784d4e41aca45a29547dc1a21c5d4de [ 743.481594] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 743.510407] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 743.510407] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 743.510407] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.510631] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 743.510631] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.510631] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 743.510836] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 743.510942] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 743.511093] env[62109]: DEBUG nova.virt.hardware [None 
req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 743.511223] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.511384] env[62109]: DEBUG nova.virt.hardware [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.512112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.053s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.512692] env[62109]: ERROR nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. 
[ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Traceback (most recent call last): [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.driver.spawn(context, instance, image_meta, [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] vm_ref = self.build_virtual_machine(instance, [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.512692] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] for vif in network_info: [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return self._sync_wrapper(fn, *args, **kwargs) [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.wait() [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self[:] = self._gt.wait() [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return self._exit_event.wait() [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] current.throw(*self._exc) [ 743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
743.513101] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] result = function(*args, **kwargs) [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] return func(*args, **kwargs) [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise e [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] nwinfo = self.network_api.allocate_for_instance( [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] created_port_ids = self._update_ports_for_instance( [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] with excutils.save_and_reraise_exception(): [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] self.force_reraise() [ 743.513489] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise self.value [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] updated_port = self._update_port( [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] _ensure_no_port_binding_failure(port) [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] raise exception.PortBindingFailed(port_id=port['id']) [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] nova.exception.PortBindingFailed: Binding failed for 
port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. [ 743.513855] env[62109]: ERROR nova.compute.manager [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] [ 743.513855] env[62109]: DEBUG nova.compute.utils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 743.514920] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Build of instance 446bd52c-4ffb-4e77-89fb-3e8535ceb4af was re-scheduled: Binding failed for port 7dc696ee-8e41-4b30-9400-d4ba7c26b170, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 743.515334] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 743.515553] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.515733] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.515898] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 743.516967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg ef196a5233294779aaa80433ec10cb9d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.518181] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f9abde-ff89-48e3-b1fc-2492f038084e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.521428] env[62109]: ERROR nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. 
[ 743.521428] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.521428] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 743.521428] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 743.521428] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.521428] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.521428] env[62109]: ERROR nova.compute.manager raise self.value [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 743.521428] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 743.521428] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.521428] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 743.521966] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.521966] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 743.521966] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. 
[ 743.521966] env[62109]: ERROR nova.compute.manager [ 743.521966] env[62109]: Traceback (most recent call last): [ 743.521966] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 743.521966] env[62109]: listener.cb(fileno) [ 743.521966] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.521966] env[62109]: result = function(*args, **kwargs) [ 743.521966] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 743.521966] env[62109]: return func(*args, **kwargs) [ 743.521966] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.521966] env[62109]: raise e [ 743.521966] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.521966] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 743.521966] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 743.521966] env[62109]: created_port_ids = self._update_ports_for_instance( [ 743.521966] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 743.521966] env[62109]: with excutils.save_and_reraise_exception(): [ 743.521966] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.521966] env[62109]: self.force_reraise() [ 743.521966] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.521966] env[62109]: raise self.value [ 743.521966] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 743.521966] env[62109]: updated_port = self._update_port( [ 743.521966] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.521966] env[62109]: _ensure_no_port_binding_failure(port) [ 743.521966] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.521966] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 743.522796] env[62109]: nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. 
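The traceback above bottoms out in _ensure_no_port_binding_failure() (nova/network/neutron.py:294) raising PortBindingFailed after Neutron hands back a port whose binding failed. Below is a minimal, standalone sketch of that kind of check, assuming the conventional 'binding_failed' sentinel in the port's binding:vif_type field; the VIF_TYPE_BINDING_FAILED constant and the simplified exception class are illustrative stand-ins, not Nova's actual definitions.

    # Standalone sketch (not Nova's source) of the check implied by the traceback:
    # a port whose binding:vif_type came back as 'binding_failed' was never wired
    # up by any Neutron mechanism driver, so spawning the instance must abort.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port_id)


    def ensure_no_port_binding_failure(port):
        # Reject the port as soon as Neutron reports a failed binding.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # The port from this log would hit that path:
    ensure_no_port_binding_failure(
        {'id': 'bd3d8b29-3381-4851-99c3-877677b6056f',
         'binding:vif_type': 'binding_failed'})

In this section the same failure surfaces for three ports (7dc696ee-..., bd3d8b29-..., 2f48948e-...), always via the same allocate_for_instance() -> _update_ports_for_instance() -> _update_port() path shown in the tracebacks.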
[ 743.522796] env[62109]: Removing descriptor: 19 [ 743.522796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.759s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.523890] env[62109]: INFO nova.compute.claims [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.526116] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 0ec08995071c449381e4cca821072978 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.527526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef196a5233294779aaa80433ec10cb9d [ 743.533888] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c24e9a-5374-4c0c-ab6c-63b87e0347f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.549183] env[62109]: ERROR nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. 
[ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Traceback (most recent call last): [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] yield resources [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.driver.spawn(context, instance, image_meta, [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] vm_ref = self.build_virtual_machine(instance, [ 743.549183] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] for vif in network_info: [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return self._sync_wrapper(fn, *args, **kwargs) [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.wait() [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self[:] = self._gt.wait() [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return self._exit_event.wait() [ 743.549610] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.549610] env[62109]: ERROR 
nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] current.throw(*self._exc) [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] result = function(*args, **kwargs) [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return func(*args, **kwargs) [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise e [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] nwinfo = self.network_api.allocate_for_instance( [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] created_port_ids = self._update_ports_for_instance( [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] with excutils.save_and_reraise_exception(): [ 743.550026] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.force_reraise() [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise self.value [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] updated_port = self._update_port( [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] _ensure_no_port_binding_failure(port) [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise exception.PortBindingFailed(port_id=port['id']) [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. [ 743.550386] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] [ 743.550386] env[62109]: INFO nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Terminating instance [ 743.552208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.562759] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ec08995071c449381e4cca821072978 [ 743.638346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.904861] env[62109]: DEBUG nova.network.neutron [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 743.977808] env[62109]: DEBUG nova.network.neutron [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.978447] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] Expecting reply to msg fa4d7768314f4c82853ca2e7e33bcdc2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 743.986236] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa4d7768314f4c82853ca2e7e33bcdc2 [ 744.029366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 2f04e1f43c614690a5d5e13c6609e281 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 744.032926] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.036783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f04e1f43c614690a5d5e13c6609e281 [ 744.119131] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.119679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 798ba0ba9e95497cab4f1cb7b3e9a8bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 744.127741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 798ba0ba9e95497cab4f1cb7b3e9a8bb [ 744.481265] env[62109]: DEBUG oslo_concurrency.lockutils [req-4bbef00f-7c61-48df-be3b-47d9b644a6f5 req-4b258b2f-8cc1-4c27-9bb8-04dd40b5b4f9 service nova] Releasing lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.481698] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.482073] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 744.482319] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 33399f8827d149a58dd1805d54b773a3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 744.491447] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33399f8827d149a58dd1805d54b773a3 [ 744.621566] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-446bd52c-4ffb-4e77-89fb-3e8535ceb4af" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.621838] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 744.622027] env[62109]: DEBUG nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 744.622195] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 744.653825] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.654502] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 17e9886f7e09462c8f2466549f371098 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 744.662317] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17e9886f7e09462c8f2466549f371098 [ 744.991331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca532f96-bffa-4560-adf5-8e9bc5e3a0a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.001019] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2be3744-d5c7-47df-a664-42264484b111 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.005586] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 745.038182] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea74d60-1c27-43d6-8c24-2e8ae02996e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.046304] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9797016b-05c0-46a7-94a1-a8a605f7d8be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.064349] env[62109]: DEBUG nova.compute.provider_tree [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.064644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 31028def939048dd85ca017fae306684 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.072158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31028def939048dd85ca017fae306684 [ 745.129871] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.130416] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 87d533b8425e49a6bde342056da36ec5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.139876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87d533b8425e49a6bde342056da36ec5 [ 745.156951] env[62109]: DEBUG nova.network.neutron [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.157808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 08e0b48db6714607a096d93c691650ed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.167683] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08e0b48db6714607a096d93c691650ed [ 745.441408] env[62109]: DEBUG nova.compute.manager [req-a77bdcf7-d783-4daa-927b-fa3e41a4ab70 req-dcac3c23-b648-4473-9ebe-fb1faa61cc15 service nova] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Received event network-vif-deleted-bd3d8b29-3381-4851-99c3-877677b6056f {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 745.569581] env[62109]: DEBUG nova.scheduler.client.report 
[None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.572150] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 966db4020cbe4faa8118e9db3ff6c292 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.587869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966db4020cbe4faa8118e9db3ff6c292 [ 745.633558] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.633558] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 745.633558] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.633805] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be43ac2d-6db4-468c-a543-5ad5632094c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.643402] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9691104d-ef0f-4532-8b05-0524fe528ef3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.660177] env[62109]: INFO nova.compute.manager [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 446bd52c-4ffb-4e77-89fb-3e8535ceb4af] Took 1.04 seconds to deallocate network for instance. 
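The scheduler report-client entry above carries the full inventory record for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. A short worked sketch of how such a record maps to schedulable capacity, assuming the usual placement formula (total - reserved) * allocation_ratio:

    # Inventory values copied from the log entry above; capacity derivation is
    # the standard placement convention and is shown here only for illustration.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With 192 schedulable vCPUs and roughly 196 GB of reportable RAM free, the single-vCPU claims made by these Tempest instances succeed immediately, which is consistent with the "Claim successful" and "Inventory has not changed" entries around this point in the log.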
[ 745.661978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg e1b8e8dc316e4d64ae8c5ea07c8db3a8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.666509] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 832c9ce1-6344-485a-a9ef-6950d1c78ef9 could not be found. [ 745.666716] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 745.666892] env[62109]: INFO nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 745.667134] env[62109]: DEBUG oslo.service.loopingcall [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.667578] env[62109]: DEBUG nova.compute.manager [-] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 745.667674] env[62109]: DEBUG nova.network.neutron [-] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.683244] env[62109]: DEBUG nova.network.neutron [-] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 745.683777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b53de3a20f1849cd88ad8f62bab3ff0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 745.690701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b53de3a20f1849cd88ad8f62bab3ff0e [ 745.702039] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1b8e8dc316e4d64ae8c5ea07c8db3a8 [ 746.074552] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.075100] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.076922] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg ec52f4bbd34d44e485e28cac85ee5073 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.078083] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.948s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.084025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.084025] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 746.084025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.571s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.084025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 8f22cc75738148e88423de7eca0336e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.084025] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4131b46-a23c-4bb7-b8aa-4b9252888d79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.092164] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab7f887-185b-4fc5-8509-37bd2e5ef1f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.105586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3263426-d1b9-4f09-ad06-b63818dee4b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.112055] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec52f4bbd34d44e485e28cac85ee5073 [ 746.113373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf39b43-2b7e-414c-b1b0-dd9df44a7350 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.119203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f22cc75738148e88423de7eca0336e0 [ 746.146134] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181675MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 746.146292] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.170293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 3a13f85f64834f0d9024932432687960 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.186066] env[62109]: DEBUG nova.network.neutron [-] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.186590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 445b4fa42ed74297bc1d495b98968c5e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.194539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 445b4fa42ed74297bc1d495b98968c5e [ 746.203548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a13f85f64834f0d9024932432687960 [ 746.583774] env[62109]: DEBUG nova.compute.utils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.584365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 
48567d89ed9548edb7bc5a2d0e07384f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.585278] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 746.585447] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.598273] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48567d89ed9548edb7bc5a2d0e07384f [ 746.635994] env[62109]: DEBUG nova.policy [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb5d486660a54e71a827e082e050bde5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9450f96e008846c39430838ba9f1ec08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 746.691176] env[62109]: INFO nova.compute.manager [-] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Took 1.02 seconds to deallocate network for instance. 
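The nova.policy entry above shows the network:attach_external_network check failing for a request context that carries only the member and reader roles. A standalone sketch of the same kind of check using oslo.policy follows; the 'role:admin' rule string is an assumption for illustration and is not necessarily Nova's registered default for this policy.

    # Standalone sketch (not Nova's policy code): evaluating a policy rule
    # against credentials shaped like the context dict in the log entry above.
    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    CONF([])  # initialize config with no CLI args or config files

    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    # Credentials mirroring the logged context: member/reader roles, not admin.
    creds = {'user_id': 'bb5d486660a54e71a827e082e050bde5',
             'project_id': '9450f96e008846c39430838ba9f1ec08',
             'roles': ['member', 'reader'],
             'is_admin': False}

    # Returns False here, which Nova records as "Policy check ... failed".
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    print(allowed)

A failed check for this particular policy is benign during normal instance builds: it only means the port will not be attached to an external network, and port creation for instance 3d99c7df-... proceeds, as the "Successfully created port" entry below shows.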
[ 746.696764] env[62109]: DEBUG nova.compute.claims [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 746.697112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.698127] env[62109]: INFO nova.scheduler.client.report [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Deleted allocations for instance 446bd52c-4ffb-4e77-89fb-3e8535ceb4af [ 746.707535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 10b4c750a1134e639053d4b166784dd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.735559] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10b4c750a1134e639053d4b166784dd2 [ 746.924760] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 88008dcdc8144d92895c874daeda6f1a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 746.940556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88008dcdc8144d92895c874daeda6f1a [ 746.973455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfbe574-f4ee-45c1-86be-e126467d05ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.978203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec384a10-f55d-4437-a874-8221997fa9ee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.021983] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Successfully created port: d19cd207-7c44-4737-9b63-dd316aa946f5 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.024430] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ff99a6-4fd3-46e0-95e1-ffd057f25d61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.032202] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75fd9a4-f0ec-476e-9546-0ccc17e1fa38 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.046794] env[62109]: DEBUG nova.compute.provider_tree [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.046794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 9303c7cfe5724886b694ad24a39c9d1e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.056467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9303c7cfe5724886b694ad24a39c9d1e [ 747.088966] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.090898] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg b42d9e3fbfde4db99b5789e803522596 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.125002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b42d9e3fbfde4db99b5789e803522596 [ 747.213623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f13d282e-55e7-4a27-be61-3cbafeaaa1aa tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "446bd52c-4ffb-4e77-89fb-3e8535ceb4af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.595s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.214232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg cfc061a8f5fb472c901862efb104f733 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.224347] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfc061a8f5fb472c901862efb104f733 [ 747.557271] env[62109]: DEBUG nova.scheduler.client.report [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.559837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 3b9a9ade5ee64c75b48a44494a6c134b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.574231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b9a9ade5ee64c75b48a44494a6c134b [ 747.595774] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 61bdb29e573543728c9d8b3fb6f66ee5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.634525] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61bdb29e573543728c9d8b3fb6f66ee5 [ 747.721362] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 747.723205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg b1507333639f4a2cb01e394d6ca30750 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.758358] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1507333639f4a2cb01e394d6ca30750 [ 747.922967] env[62109]: DEBUG nova.compute.manager [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Received event network-changed-d19cd207-7c44-4737-9b63-dd316aa946f5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 747.923167] env[62109]: DEBUG nova.compute.manager [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Refreshing instance network info cache due to event network-changed-d19cd207-7c44-4737-9b63-dd316aa946f5. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 747.923378] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] Acquiring lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.923518] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] Acquired lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.923675] env[62109]: DEBUG nova.network.neutron [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Refreshing network info cache for port d19cd207-7c44-4737-9b63-dd316aa946f5 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 747.924095] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] Expecting reply to msg b04b8079a9eb465c9f021a610a28b3e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 747.931140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b04b8079a9eb465c9f021a610a28b3e1 [ 748.062242] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.983s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.062865] env[62109]: ERROR nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
[ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Traceback (most recent call last): [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.driver.spawn(context, instance, image_meta, [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] vm_ref = self.build_virtual_machine(instance, [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.062865] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] for vif in network_info: [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return self._sync_wrapper(fn, *args, **kwargs) [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.wait() [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self[:] = self._gt.wait() [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return self._exit_event.wait() [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] current.throw(*self._exc) [ 748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
748.063225] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] result = function(*args, **kwargs) [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] return func(*args, **kwargs) [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise e [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] nwinfo = self.network_api.allocate_for_instance( [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] created_port_ids = self._update_ports_for_instance( [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] with excutils.save_and_reraise_exception(): [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] self.force_reraise() [ 748.063595] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise self.value [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] updated_port = self._update_port( [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] _ensure_no_port_binding_failure(port) [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] raise exception.PortBindingFailed(port_id=port['id']) [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] nova.exception.PortBindingFailed: Binding failed for 
port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. [ 748.063934] env[62109]: ERROR nova.compute.manager [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] [ 748.063934] env[62109]: DEBUG nova.compute.utils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.065272] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.918s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.066972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c21670d45bf74ed3a701adcc3f269228 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 748.068504] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Build of instance 6f8e35f3-4b35-449c-9e60-1e0624f41cd2 was re-scheduled: Binding failed for port 2f48948e-41a3-4548-a9be-24230ffff871, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 748.069059] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 748.069223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquiring lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.069364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Acquired lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.069521] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.069877] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 726dd23c6f0f4468bf66e4a9fb163b13 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 748.079614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 726dd23c6f0f4468bf66e4a9fb163b13 [ 748.082677] env[62109]: ERROR nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. 
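Both dumps that follow come out of ComputeManager._allocate_network_async (manager.py:1989 and 2011 in the trace), which retries network setup up to a configured number of attempts and re-raises the last failure; the "after 1 attempt(s)" wording above corresponds to a single attempt. A rough sketch of that retry-then-raise shape, with the attempt budget modeled after nova's network_allocate_retries option; the loop details are an assumption, only the final raise e is visible in the trace, and allocate stands in for network_api.allocate_for_instance:

    import logging

    LOG = logging.getLogger(__name__)

    def allocate_network_with_retries(allocate, instance_uuid, retries=0):
        # retries mirrors [DEFAULT]network_allocate_retries; zero extra retries
        # means exactly one attempt, matching "after 1 attempt(s)" above.
        attempts = max(retries, 0) + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate(instance_uuid)
            except Exception as e:
                if attempt == attempts:
                    LOG.exception('Instance failed network setup after '
                                  '%d attempt(s)', attempt)
                    raise e  # the `raise e` seen at manager.py:2011
                LOG.warning('Network setup attempt %d for %s failed, retrying',
                            attempt, instance_uuid)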
[ 748.082677] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.082677] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.082677] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.082677] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.082677] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.082677] env[62109]: ERROR nova.compute.manager raise self.value [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.082677] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 748.082677] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.082677] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 748.083121] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.083121] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 748.083121] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. 
[ 748.083121] env[62109]: ERROR nova.compute.manager [ 748.083121] env[62109]: Traceback (most recent call last): [ 748.083121] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 748.083121] env[62109]: listener.cb(fileno) [ 748.083121] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.083121] env[62109]: result = function(*args, **kwargs) [ 748.083121] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.083121] env[62109]: return func(*args, **kwargs) [ 748.083121] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.083121] env[62109]: raise e [ 748.083121] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.083121] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 748.083121] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.083121] env[62109]: created_port_ids = self._update_ports_for_instance( [ 748.083121] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.083121] env[62109]: with excutils.save_and_reraise_exception(): [ 748.083121] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.083121] env[62109]: self.force_reraise() [ 748.083121] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.083121] env[62109]: raise self.value [ 748.083121] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.083121] env[62109]: updated_port = self._update_port( [ 748.083121] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.083121] env[62109]: _ensure_no_port_binding_failure(port) [ 748.083121] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.083121] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 748.083884] env[62109]: nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. [ 748.083884] env[62109]: Removing descriptor: 19 [ 748.106614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c21670d45bf74ed3a701adcc3f269228 [ 748.108935] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.134495] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.134774] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.135003] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.135228] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.135521] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.135785] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.136177] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.136402] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 748.136621] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.136825] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.137043] env[62109]: DEBUG nova.virt.hardware [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.138045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a08ffa3-2aa5-4e6d-bcb4-1334a42d91b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.148522] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead2d43f-ff6f-4c78-b3db-39ed4f71fd4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.165282] env[62109]: ERROR nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. 
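The spawn failure recorded above is the same port-binding error resurfacing: network allocation ran asynchronously in a greenthread, and the wrapper that get_vif_info iterates (nova/network/model.py __iter__ -> _sync_wrapper -> wait in the trace) only re-raises the stored exception once the VMware driver first consumes the VIF list. A toy analogue of that deferred-exception behaviour, using concurrent.futures instead of eventlet purely for illustration:

    from concurrent.futures import ThreadPoolExecutor

    class NetworkInfoAsync:
        """Toy stand-in for nova.network.model's async network-info wrapper."""
        def __init__(self, allocate):
            self._pool = ThreadPoolExecutor(max_workers=1)
            self._future = self._pool.submit(allocate)

        def __iter__(self):
            # Block until allocation finishes; result() re-raises any exception
            # the background allocation stored, as wait() does in the trace.
            return iter(self._future.result())

    def failing_allocation():
        raise RuntimeError('Binding failed for port '
                           'd19cd207-7c44-4737-9b63-dd316aa946f5')

    network_info = NetworkInfoAsync(failing_allocation)
    try:
        for vif in network_info:  # mirrors get_vif_info's `for vif in network_info:`
            pass
    except RuntimeError as exc:
        print('spawn fails here:', exc)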
[ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Traceback (most recent call last): [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] yield resources [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.driver.spawn(context, instance, image_meta, [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] vm_ref = self.build_virtual_machine(instance, [ 748.165282] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] for vif in network_info: [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return self._sync_wrapper(fn, *args, **kwargs) [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.wait() [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self[:] = self._gt.wait() [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return self._exit_event.wait() [ 748.165991] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 748.165991] env[62109]: ERROR 
nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] current.throw(*self._exc) [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] result = function(*args, **kwargs) [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return func(*args, **kwargs) [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise e [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] nwinfo = self.network_api.allocate_for_instance( [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] created_port_ids = self._update_ports_for_instance( [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] with excutils.save_and_reraise_exception(): [ 748.166668] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.force_reraise() [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise self.value [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] updated_port = self._update_port( [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] _ensure_no_port_binding_failure(port) [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise exception.PortBindingFailed(port_id=port['id']) [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. [ 748.167407] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] [ 748.167407] env[62109]: INFO nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Terminating instance [ 748.168128] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquiring lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.243777] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.444530] env[62109]: DEBUG nova.network.neutron [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.455586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.456058] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.553549] env[62109]: DEBUG nova.network.neutron [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.554111] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] Expecting reply to msg eff147dc73654f0c9bd54a4668145c95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 748.566624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eff147dc73654f0c9bd54a4668145c95 [ 748.591754] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.635455] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.636171] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 91be923356a344f3a895eb0b1afa3f4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 748.643895] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91be923356a344f3a895eb0b1afa3f4a [ 748.870362] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cf4431-b5b3-49c0-840b-57f5c9956689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.878054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25b2fe0-0d02-441c-b81c-1d21bd9d0a9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.909240] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1480768-afb6-4e28-8502-1157454a5d68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.916886] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42a3f86-a2e7-4eea-992c-3289392af4d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.929789] env[62109]: DEBUG nova.compute.provider_tree [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.930299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 92f453eae8f74c26a5791f4d3df91bef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 748.937299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92f453eae8f74c26a5791f4d3df91bef [ 749.061026] env[62109]: DEBUG oslo_concurrency.lockutils [req-bf025b11-4acd-4775-8944-a8f75aa48cb3 req-5c61e918-6f1c-4571-87e2-c52ef6b2e093 service nova] Releasing lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.061466] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquired lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.061656] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.062097] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg e8e6e1b4e1d347bb845b14c545a0676a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.071018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8e6e1b4e1d347bb845b14c545a0676a [ 749.138370] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Releasing lock "refresh_cache-6f8e35f3-4b35-449c-9e60-1e0624f41cd2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.138705] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 749.138934] env[62109]: DEBUG nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.139103] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.166727] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.167299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 2e427416ea0540b5ac6f6a01b2fda629 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.174401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e427416ea0540b5ac6f6a01b2fda629 [ 749.433189] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.435623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg b1c47f3d7e05427999a7367387678dd9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.450586] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c47f3d7e05427999a7367387678dd9 [ 749.578156] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.657874] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.658456] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg ae1578023e7049f4a4726f822e6c0f7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.666385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae1578023e7049f4a4726f822e6c0f7d [ 749.669335] env[62109]: DEBUG nova.network.neutron [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.669796] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg e717767fb31442ce87f3612bb2e1d07d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.677529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e717767fb31442ce87f3612bb2e1d07d [ 749.940043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.940043] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. 
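The "compute_resources" acquire/wait/held tracing in the lockutils lines above (e.g. held 1.874s by abort_instance_claim) is emitted by oslo.concurrency's synchronized wrapper, which nova places around the resource tracker's claim and abort paths. A minimal reproduction of that tracing, assuming oslo_concurrency is installed and logs through the standard logging module:

    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Work done while the lock is held; the "held N.NNNs" figure in the
        # log is the time spent inside this body.
        return instance_uuid

    abort_instance_claim('c9a6d28b-52f8-4636-886a-c74f0900e761')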
[ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Traceback (most recent call last): [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.driver.spawn(context, instance, image_meta, [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.940043] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] vm_ref = self.build_virtual_machine(instance, [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] for vif in network_info: [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return self._sync_wrapper(fn, *args, **kwargs) [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.wait() [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self[:] = self._gt.wait() [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return self._exit_event.wait() [ 749.940385] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] current.throw(*self._exc) [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] result = function(*args, **kwargs) [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] return func(*args, **kwargs) [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise e [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] nwinfo = self.network_api.allocate_for_instance( [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] created_port_ids = self._update_ports_for_instance( [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 749.940733] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] with excutils.save_and_reraise_exception(): [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] self.force_reraise() [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise self.value [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] updated_port = self._update_port( [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] _ensure_no_port_binding_failure(port) [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] raise exception.PortBindingFailed(port_id=port['id']) [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] nova.exception.PortBindingFailed: Binding failed for 
port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. [ 749.941185] env[62109]: ERROR nova.compute.manager [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] [ 749.941515] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.941917] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.368s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.943303] env[62109]: INFO nova.compute.claims [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.944849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 5c9d19f103ec48d08e0bd362459714b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.945973] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Build of instance c9a6d28b-52f8-4636-886a-c74f0900e761 was re-scheduled: Binding failed for port 7b566737-b743-4fbb-a685-3882ecc657df, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 749.946385] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 749.946637] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.946782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.946936] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.947288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 1b9d7757829c4f90a87f6a737c9e568e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 749.951745] env[62109]: DEBUG nova.compute.manager [req-0833be9d-752e-4135-aa0d-e7d00de9ef89 req-a42e2c02-5521-4fcf-a1fd-c14ff4484880 service nova] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Received event network-vif-deleted-d19cd207-7c44-4737-9b63-dd316aa946f5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 749.954406] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b9d7757829c4f90a87f6a737c9e568e [ 749.980569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c9d19f103ec48d08e0bd362459714b0 [ 750.160936] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Releasing lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.161424] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 750.161626] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 750.161936] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fc9403c7-902a-4ba8-8d57-74832046ea2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.171189] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc20c39f-3708-4a1f-82ab-c8b87b21b6ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.181308] env[62109]: INFO nova.compute.manager [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] [instance: 6f8e35f3-4b35-449c-9e60-1e0624f41cd2] Took 1.04 seconds to deallocate network for instance. [ 750.182813] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 1398b04491eb49b6b681621516733d6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.193947] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d99c7df-b031-4187-988c-f642f79073d3 could not be found. [ 750.194146] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 750.194315] env[62109]: INFO nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 750.194712] env[62109]: DEBUG oslo.service.loopingcall [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.194992] env[62109]: DEBUG nova.compute.manager [-] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.195096] env[62109]: DEBUG nova.network.neutron [-] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 750.208739] env[62109]: DEBUG nova.network.neutron [-] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.209218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5c3b1842311144a99c8026980cb73ffd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.215032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1398b04491eb49b6b681621516733d6b [ 750.215483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c3b1842311144a99c8026980cb73ffd [ 750.450694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 38fd3aeada074f508f8dd13e9df0e04b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.458448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38fd3aeada074f508f8dd13e9df0e04b [ 750.466208] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.557436] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.557965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d28218e8edc843a89b0577f99b46256c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.566551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d28218e8edc843a89b0577f99b46256c [ 750.687684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg 8444f72bbb4a49bfa6d269a42f07b6ed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.711117] env[62109]: DEBUG nova.network.neutron [-] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.711657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 63a8c260003c41eea530b23ed2b07af4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 750.718295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8444f72bbb4a49bfa6d269a42f07b6ed [ 750.720023] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63a8c260003c41eea530b23ed2b07af4 [ 751.059676] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-c9a6d28b-52f8-4636-886a-c74f0900e761" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.059976] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 751.060118] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.060302] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 751.073740] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.074301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg b82358b5ded64c84ae9b8796708f98c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.081270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b82358b5ded64c84ae9b8796708f98c2 [ 751.208448] env[62109]: INFO nova.scheduler.client.report [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Deleted allocations for instance 6f8e35f3-4b35-449c-9e60-1e0624f41cd2 [ 751.215823] env[62109]: INFO nova.compute.manager [-] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Took 1.02 seconds to deallocate network for instance. 
[ 751.215823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Expecting reply to msg dd2585f5c0a8472eb1ac1a555006a4c8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.217527] env[62109]: DEBUG nova.compute.claims [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 751.217699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.228537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd2585f5c0a8472eb1ac1a555006a4c8 [ 751.295345] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b60ac2-3325-411f-b643-bc8406ca3cb3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.303237] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8fdb7c-2d3d-4641-aa47-269cfd1e398c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.331745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26153f50-42fc-45a0-97c7-89a697634087 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.338135] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d129dd02-d483-46dd-9727-3c5d52cd39d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.350769] env[62109]: DEBUG nova.compute.provider_tree [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.350769] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg fb074e0e8cb4465b831acf08759fb2ff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.358071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb074e0e8cb4465b831acf08759fb2ff [ 751.576244] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
751.576891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 301d3a2a6cc542c090155a6d35d43bed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.585358] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 301d3a2a6cc542c090155a6d35d43bed [ 751.718687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33e81f1b-3607-4a32-9744-456196feb4a1 tempest-ServerDiagnosticsTest-1641091972 tempest-ServerDiagnosticsTest-1641091972-project-member] Lock "6f8e35f3-4b35-449c-9e60-1e0624f41cd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 134.191s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.719183] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg a890b877c41b4edf91968b8c0af028e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.728054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a890b877c41b4edf91968b8c0af028e8 [ 751.853666] env[62109]: DEBUG nova.scheduler.client.report [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.856171] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg fc5aebed1a554f48b7ba9d7e6e046556 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 751.870727] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc5aebed1a554f48b7ba9d7e6e046556 [ 752.079356] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: c9a6d28b-52f8-4636-886a-c74f0900e761] Took 1.02 seconds to deallocate network for instance. 
[ 752.081106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 80f33aeca566463fa99389c0cea9cee3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.114945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80f33aeca566463fa99389c0cea9cee3 [ 752.221212] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 752.223077] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg b30e3c2301fd4704816110c1ed2faebf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.259578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b30e3c2301fd4704816110c1ed2faebf [ 752.358787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.359484] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 752.361353] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg ab11947d434c45c99df992a524706771 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.362424] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.865s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.376582] env[62109]: INFO nova.compute.claims [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.378413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg ffcc157bac224df4a6ac78ef11ab169a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.404175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab11947d434c45c99df992a524706771 [ 752.441139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffcc157bac224df4a6ac78ef11ab169a [ 752.585249] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 3a5e75b113ef48bf94e167bd52469670 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.618050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a5e75b113ef48bf94e167bd52469670 [ 752.743854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.881759] env[62109]: DEBUG nova.compute.utils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.882394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg a41dbe65bf844db8a7a9311307c034b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 752.884294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg b0f9d978c7e840c99a6b1bd6c19c8d14 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 752.885238] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 752.885392] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 752.892195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0f9d978c7e840c99a6b1bd6c19c8d14 [ 752.897137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a41dbe65bf844db8a7a9311307c034b0 [ 752.962222] env[62109]: DEBUG nova.policy [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86c0cc330fe84a8abab59cfe4ec905b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d1ce7730c9d48cfb8a68754aad558ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 753.119048] env[62109]: INFO nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Deleted allocations for instance c9a6d28b-52f8-4636-886a-c74f0900e761 [ 753.125107] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 778aa41d1fa542b5b20c9c7d45a744ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 753.142448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 778aa41d1fa542b5b20c9c7d45a744ba [ 753.385968] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 753.387658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 882e787654c84f87abbe51b448798cb6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 753.393541] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Successfully created port: 1c6a3483-455d-4776-a8a1-23760212d4ef {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.423359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882e787654c84f87abbe51b448798cb6 [ 753.628068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "c9a6d28b-52f8-4636-886a-c74f0900e761" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 134.627s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.628661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 1a56df2737194b98863effe7b62d9d74 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 753.640542] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a56df2737194b98863effe7b62d9d74 [ 753.788149] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bddeb15-2e58-4ab3-b9fa-340fe1d4423f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.799150] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12af2235-637b-4021-adf7-7443595f29ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.832095] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f8e306-ad52-46de-b8d9-0469a46fda86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.839648] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59819978-2246-45e1-ab29-b4df3a46f058 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.853785] env[62109]: DEBUG nova.compute.provider_tree [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.854305] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 
tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg b9580461e434448484a65644fbff5d72 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 753.864051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9580461e434448484a65644fbff5d72 [ 753.895163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg e59dbf9fdee342a5b687fcd61357c58e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 753.929463] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e59dbf9fdee342a5b687fcd61357c58e [ 754.130864] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 754.132623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 7f445d36840c4241874d066130a629ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.176123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f445d36840c4241874d066130a629ce [ 754.259280] env[62109]: DEBUG nova.compute.manager [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Received event network-changed-1c6a3483-455d-4776-a8a1-23760212d4ef {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 754.259469] env[62109]: DEBUG nova.compute.manager [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Refreshing instance network info cache due to event network-changed-1c6a3483-455d-4776-a8a1-23760212d4ef. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 754.259682] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] Acquiring lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.259821] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] Acquired lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.259976] env[62109]: DEBUG nova.network.neutron [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Refreshing network info cache for port 1c6a3483-455d-4776-a8a1-23760212d4ef {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 754.260426] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] Expecting reply to msg 83f3636888c04c698c43b72bfaccd813 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.269839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83f3636888c04c698c43b72bfaccd813 [ 754.360512] env[62109]: DEBUG nova.scheduler.client.report [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.360512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 2a404d39f10c4e0589dd5c50d8a0a929 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.375042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a404d39f10c4e0589dd5c50d8a0a929 [ 754.398273] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 754.423965] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.424476] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.424805] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.425135] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.425426] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.425713] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.426060] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.426375] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 754.426704] 
env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.427013] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.427329] env[62109]: DEBUG nova.virt.hardware [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.428404] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661712af-21fd-4d3a-b0e5-fbfca0dc5b97 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.437818] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e6a41b-7d09-4f09-a7bd-1a301d9bd32c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.655842] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.775722] env[62109]: ERROR nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. 
[ 754.775722] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.775722] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 754.775722] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 754.775722] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.775722] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.775722] env[62109]: ERROR nova.compute.manager raise self.value [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 754.775722] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 754.775722] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.775722] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 754.776326] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.776326] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 754.776326] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. 
[ 754.776326] env[62109]: ERROR nova.compute.manager [ 754.781867] env[62109]: Traceback (most recent call last): [ 754.781867] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 754.781867] env[62109]: listener.cb(fileno) [ 754.781867] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 754.781867] env[62109]: result = function(*args, **kwargs) [ 754.781867] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 754.781867] env[62109]: return func(*args, **kwargs) [ 754.781867] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 754.781867] env[62109]: raise e [ 754.781867] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.781867] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 754.781867] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 754.781867] env[62109]: created_port_ids = self._update_ports_for_instance( [ 754.781867] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 754.781867] env[62109]: with excutils.save_and_reraise_exception(): [ 754.781867] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.781867] env[62109]: self.force_reraise() [ 754.781867] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.781867] env[62109]: raise self.value [ 754.781867] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 754.781867] env[62109]: updated_port = self._update_port( [ 754.781867] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.781867] env[62109]: _ensure_no_port_binding_failure(port) [ 754.781867] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.781867] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 754.781867] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. [ 754.781867] env[62109]: Removing descriptor: 19 [ 754.783241] env[62109]: ERROR nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. 
[ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Traceback (most recent call last): [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] yield resources [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.driver.spawn(context, instance, image_meta, [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] vm_ref = self.build_virtual_machine(instance, [ 754.783241] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] vif_infos = vmwarevif.get_vif_info(self._session, [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] for vif in network_info: [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self._sync_wrapper(fn, *args, **kwargs) [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.wait() [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self[:] = self._gt.wait() [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self._exit_event.wait() [ 754.783662] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 754.783662] env[62109]: ERROR 
nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] result = hub.switch() [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self.greenlet.switch() [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] result = function(*args, **kwargs) [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return func(*args, **kwargs) [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise e [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] nwinfo = self.network_api.allocate_for_instance( [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] created_port_ids = self._update_ports_for_instance( [ 754.784093] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] with excutils.save_and_reraise_exception(): [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.force_reraise() [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise self.value [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] updated_port = self._update_port( [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.784518] 
env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] _ensure_no_port_binding_failure(port) [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise exception.PortBindingFailed(port_id=port['id']) [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. [ 754.784518] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] [ 754.784930] env[62109]: INFO nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Terminating instance [ 754.785826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.786679] env[62109]: DEBUG nova.network.neutron [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.845197] env[62109]: DEBUG nova.network.neutron [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.845978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] Expecting reply to msg f42c7bdcd23d42fc852130a89d4dc8a6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.858167] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f42c7bdcd23d42fc852130a89d4dc8a6 [ 754.878985] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.878985] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 754.878985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg fc072f903be34873ba33d67cfa442863 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.878985] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.459s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.878985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 8da1d1dd9c194b4e98825e115b22a408 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 754.905372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc072f903be34873ba33d67cfa442863 [ 754.938385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8da1d1dd9c194b4e98825e115b22a408 [ 755.358488] env[62109]: DEBUG oslo_concurrency.lockutils [req-8d229bb0-dc92-4f61-a9a2-16c3a606da67 req-793db946-4961-4086-8fb8-6a788b70cb99 service nova] Releasing lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.358865] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquired lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.359084] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 755.359548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg ea138f0607a44bc6b9089e3ce442bf45 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 755.366757] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea138f0607a44bc6b9089e3ce442bf45 [ 755.389752] env[62109]: DEBUG nova.compute.utils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.389752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 
05ad675c6841413e9f99b19935e4977e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 755.390913] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 755.391110] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 755.402963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05ad675c6841413e9f99b19935e4977e [ 755.463119] env[62109]: DEBUG nova.policy [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86c0cc330fe84a8abab59cfe4ec905b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d1ce7730c9d48cfb8a68754aad558ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 755.808347] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935e8a2d-0505-4950-a45d-b4f64a7c1fdc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.815901] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27127860-fc88-4b54-9f1b-0b49f44cc423 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.852459] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Successfully created port: 27d29d58-7775-4338-8153-6267d4a560a3 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.854862] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a947c3-a480-4fea-8f42-9479ba49c8c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.863962] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b057ed2-b68c-4c41-9e49-e265f1d889ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.879406] env[62109]: DEBUG nova.compute.provider_tree [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.879930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg e8966f50f9c441ff902c9bfc3848c8b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 755.886721] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.891812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8966f50f9c441ff902c9bfc3848c8b0 [ 755.897517] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 755.897517] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 7b89dffe6007446085e166670ded502d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 755.940642] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.941215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 7d0de08c120b4b16a688ffb56a963950 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 755.942246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b89dffe6007446085e166670ded502d [ 755.950738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d0de08c120b4b16a688ffb56a963950 [ 756.290529] env[62109]: DEBUG nova.compute.manager [req-eef54173-3f7f-4f4c-9a9c-4c8c52c3db81 req-e544688c-d11d-403d-b3a7-e9eb6410e678 service nova] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Received event network-vif-deleted-1c6a3483-455d-4776-a8a1-23760212d4ef {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 756.388941] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.388941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg b5882ea596274f3c825eb270b98e659c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 756.403121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5882ea596274f3c825eb270b98e659c [ 756.407868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg fbe7b3649ca24f99940fd5500082d080 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 756.442338] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbe7b3649ca24f99940fd5500082d080 [ 756.444042] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Releasing lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.444427] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 756.444607] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 756.445104] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66d25be8-5434-4ccc-a732-fec26028cdf0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.454255] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c6aeb4-3ea8-4f77-8122-42e9c9f5236a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.476049] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87304cf6-e65f-41de-ab6f-d2170aaa9064 could not be found. 
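The inventory record logged just above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e (VCPU, MEMORY_MB, DISK_GB with total, reserved, max_unit and allocation_ratio) is the data the scheduler report client compares against placement. As a rough illustration only, here is a minimal standalone sketch of how schedulable capacity can be derived from a record shaped like that one; the helper name schedulable_capacity is hypothetical and the formula capacity = (total - reserved) * allocation_ratio is an assumption about how placement normally treats reserved and allocation_ratio, not something stated in the log:

    # Hypothetical helper (not Nova/placement code): derive schedulable capacity
    # from an inventory record shaped like the one logged above.
    # Assumption: capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        # One entry per resource class, e.g. {'VCPU': 192, ...}
        return {
            rc: int((rec['total'] - rec['reserved']) * rec['allocation_ratio'])
            for rc, rec in inv.items()
        }

    print(schedulable_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

Under that assumption the logged values work out to roughly 192 VCPU, 196078 MB and 400 GB of schedulable capacity, while max_unit (16 / 65530 / 124 in the log) would still cap what any single allocation can claim.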
[ 756.476306] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 756.476507] env[62109]: INFO nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Took 0.03 seconds to destroy the instance on the hypervisor. [ 756.476769] env[62109]: DEBUG oslo.service.loopingcall [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.476991] env[62109]: DEBUG nova.compute.manager [-] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 756.477106] env[62109]: DEBUG nova.network.neutron [-] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 756.492647] env[62109]: DEBUG nova.network.neutron [-] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.493322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c9a48e2f94dc4357a8343b2aa548fc20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 756.500164] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9a48e2f94dc4357a8343b2aa548fc20 [ 756.892314] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.025s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.892943] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
[ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Traceback (most recent call last): [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.driver.spawn(context, instance, image_meta, [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] vm_ref = self.build_virtual_machine(instance, [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.892943] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] for vif in network_info: [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return self._sync_wrapper(fn, *args, **kwargs) [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.wait() [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self[:] = self._gt.wait() [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return self._exit_event.wait() [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] current.throw(*self._exc) [ 756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
756.893282] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] result = function(*args, **kwargs) [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] return func(*args, **kwargs) [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise e [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] nwinfo = self.network_api.allocate_for_instance( [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] created_port_ids = self._update_ports_for_instance( [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] with excutils.save_and_reraise_exception(): [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] self.force_reraise() [ 756.893633] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise self.value [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] updated_port = self._update_port( [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] _ensure_no_port_binding_failure(port) [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] raise exception.PortBindingFailed(port_id=port['id']) [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] nova.exception.PortBindingFailed: Binding failed for 
port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. [ 756.893978] env[62109]: ERROR nova.compute.manager [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] [ 756.893978] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 756.894889] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.655s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.896693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 576f1cfb974d48588451095d3e0f28c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 756.897956] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Build of instance 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4 was re-scheduled: Binding failed for port c2098700-1246-4d9f-9eff-78376ba87e82, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 756.898361] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 756.898580] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.898723] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.898875] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.899348] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 47a58971c6304b23966f96b1e544a26b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 756.909660] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 756.911848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47a58971c6304b23966f96b1e544a26b [ 756.935130] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.935611] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.935611] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.935743] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.935850] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.935987] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.936427] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.936586] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.936749] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.936904] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.937265] env[62109]: DEBUG nova.virt.hardware [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.940455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fc51e8-6f74-4f17-9990-103dddf5e2cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.941967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 576f1cfb974d48588451095d3e0f28c6 [ 756.947835] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9ce514-b6a9-44b9-bdbc-5567a90d750a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.996394] env[62109]: DEBUG nova.network.neutron [-] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.996736] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 310bd1836ed441938e393a6c7adab4b2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 757.005141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 310bd1836ed441938e393a6c7adab4b2 [ 757.384725] env[62109]: DEBUG nova.compute.manager [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Received event network-changed-27d29d58-7775-4338-8153-6267d4a560a3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 757.384935] env[62109]: DEBUG nova.compute.manager [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Refreshing instance network info cache due to event network-changed-27d29d58-7775-4338-8153-6267d4a560a3. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 757.385136] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] Acquiring lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.385355] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] Acquired lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.385744] env[62109]: DEBUG nova.network.neutron [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Refreshing network info cache for port 27d29d58-7775-4338-8153-6267d4a560a3 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 757.385953] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] Expecting reply to msg ac0215970c594fe0b6b0cc76b1a01f2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 757.392557] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac0215970c594fe0b6b0cc76b1a01f2a [ 757.423324] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.499724] env[62109]: INFO nova.compute.manager [-] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Took 1.02 seconds to deallocate network for instance. 
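The PortBindingFailed tracebacks above all terminate in nova/network/neutron.py, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']). As a rough illustration only, the following is a minimal standalone sketch of that style of check, not Nova's actual implementation; the assumption that a failed binding surfaces on the port as binding:vif_type == 'binding_failed' is mine and is not stated in the log:

    # Minimal sketch (not Nova code) of the check the tracebacks above end in.
    # Assumption: Neutron reports a failed binding via binding:vif_type.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f'Binding failed for port {port_id}, '
                f'please check neutron logs for more information.')

    def ensure_no_port_binding_failure(port):
        # Raise if the port dict returned by Neutron carries a failed binding.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example using the port id from the log records above.
    try:
        ensure_no_port_binding_failure(
            {'id': '27d29d58-7775-4338-8153-6267d4a560a3',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)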
[ 757.503175] env[62109]: DEBUG nova.compute.claims [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 757.503550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.545411] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.545958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 0b610dac1a7f4f30b51438114fbfe37c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 757.556117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b610dac1a7f4f30b51438114fbfe37c [ 757.641747] env[62109]: ERROR nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. 
[ 757.641747] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.641747] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.641747] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.641747] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.641747] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.641747] env[62109]: ERROR nova.compute.manager raise self.value [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.641747] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 757.641747] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.641747] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 757.642463] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.642463] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 757.642463] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. 
[ 757.642463] env[62109]: ERROR nova.compute.manager [ 757.642463] env[62109]: Traceback (most recent call last): [ 757.642463] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 757.642463] env[62109]: listener.cb(fileno) [ 757.642463] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.642463] env[62109]: result = function(*args, **kwargs) [ 757.642463] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.642463] env[62109]: return func(*args, **kwargs) [ 757.642463] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.642463] env[62109]: raise e [ 757.642463] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.642463] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 757.642463] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.642463] env[62109]: created_port_ids = self._update_ports_for_instance( [ 757.642463] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.642463] env[62109]: with excutils.save_and_reraise_exception(): [ 757.642463] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.642463] env[62109]: self.force_reraise() [ 757.642463] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.642463] env[62109]: raise self.value [ 757.642463] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.642463] env[62109]: updated_port = self._update_port( [ 757.642463] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.642463] env[62109]: _ensure_no_port_binding_failure(port) [ 757.642463] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.642463] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 757.643382] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. [ 757.643382] env[62109]: Removing descriptor: 16 [ 757.643382] env[62109]: ERROR nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. 
[ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Traceback (most recent call last): [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] yield resources [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.driver.spawn(context, instance, image_meta, [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.643382] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] vm_ref = self.build_virtual_machine(instance, [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] for vif in network_info: [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self._sync_wrapper(fn, *args, **kwargs) [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.wait() [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self[:] = self._gt.wait() [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self._exit_event.wait() [ 757.643980] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.645149] env[62109]: ERROR 
nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] result = hub.switch() [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self.greenlet.switch() [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] result = function(*args, **kwargs) [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return func(*args, **kwargs) [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise e [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] nwinfo = self.network_api.allocate_for_instance( [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.645149] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] created_port_ids = self._update_ports_for_instance( [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] with excutils.save_and_reraise_exception(): [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.force_reraise() [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise self.value [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] updated_port = self._update_port( [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.645895] 
env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] _ensure_no_port_binding_failure(port) [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.645895] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise exception.PortBindingFailed(port_id=port['id']) [ 757.646519] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. [ 757.646519] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] [ 757.646519] env[62109]: INFO nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Terminating instance [ 757.646519] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.769733] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebd669d-88d3-4842-81eb-bb61f399f4ac {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.776949] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782257fc-4f9e-4c9f-9df1-380a63320f5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.811039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717006b0-338c-4747-83dd-293549b74822 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.818529] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0e767e-0b52-47c4-938f-866c3d5a3eb3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.831485] env[62109]: DEBUG nova.compute.provider_tree [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.831988] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg c59fc3733c0848b7be7b45b5719d3bf9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 757.838993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c59fc3733c0848b7be7b45b5719d3bf9 [ 757.900927] env[62109]: DEBUG nova.network.neutron [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 
service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.962622] env[62109]: DEBUG nova.network.neutron [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.963298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] Expecting reply to msg f7a0892f7c6346c58387aea78e1a927a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 757.971943] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7a0892f7c6346c58387aea78e1a927a [ 758.051591] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.051958] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 758.052134] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 758.052233] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 758.067040] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.067651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg dd7c7f3794c947a3ac3a3cc482c434e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 758.076562] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd7c7f3794c947a3ac3a3cc482c434e5 [ 758.335189] env[62109]: DEBUG nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.337708] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg ee323200094a471391bd52301046ae0f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 758.348755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee323200094a471391bd52301046ae0f [ 758.466190] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea4a0792-0b77-4cf1-8e74-f9f63b37d786 req-93688e97-cc0c-4cb4-bd2e-8779d3eadab8 service nova] Releasing lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.466626] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquired lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.466758] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.467196] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 892bb1e8887148ed8a52aec6b73fb588 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 758.475014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 892bb1e8887148ed8a52aec6b73fb588 [ 758.569532] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 
27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.570065] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 433ec6c3f3e5460dbd3ca49127c5a891 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 758.578104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 433ec6c3f3e5460dbd3ca49127c5a891 [ 758.840899] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.842192] env[62109]: ERROR nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Traceback (most recent call last): [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.driver.spawn(context, instance, image_meta, [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] vm_ref = self.build_virtual_machine(instance, [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.842192] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] for vif in network_info: [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 
9f77d364-928f-4595-9253-8bb216b9215b] return self._sync_wrapper(fn, *args, **kwargs) [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.wait() [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self[:] = self._gt.wait() [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return self._exit_event.wait() [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] result = hub.switch() [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.842634] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return self.greenlet.switch() [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] result = function(*args, **kwargs) [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] return func(*args, **kwargs) [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise e [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] nwinfo = self.network_api.allocate_for_instance( [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] created_port_ids = self._update_ports_for_instance( [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 
9f77d364-928f-4595-9253-8bb216b9215b] with excutils.save_and_reraise_exception(): [ 758.843039] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] self.force_reraise() [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise self.value [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] updated_port = self._update_port( [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] _ensure_no_port_binding_failure(port) [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] raise exception.PortBindingFailed(port_id=port['id']) [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] nova.exception.PortBindingFailed: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. [ 758.843457] env[62109]: ERROR nova.compute.manager [instance: 9f77d364-928f-4595-9253-8bb216b9215b] [ 758.843804] env[62109]: DEBUG nova.compute.utils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 758.843804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.208s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.844854] env[62109]: INFO nova.compute.claims [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.846523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 1c06fac44b3a4627b0b128be96dc4f07 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 758.849674] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Build of instance 9f77d364-928f-4595-9253-8bb216b9215b was re-scheduled: Binding failed for port 2051acbf-f948-4fb9-b38a-44883022ff72, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 758.849674] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 758.849674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquiring lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.849674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Acquired lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.850187] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.850187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg bd5bcbc5afaf4d0fab4d980ff23c1e64 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 758.855604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd5bcbc5afaf4d0fab4d980ff23c1e64 [ 758.883998] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c06fac44b3a4627b0b128be96dc4f07 [ 758.989065] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.059797] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.060444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg c29e7d1a89154e8480618f62b7135409 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.069476] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c29e7d1a89154e8480618f62b7135409 [ 759.072932] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4] Took 1.02 seconds to deallocate network for instance. [ 759.074083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 36cf2c395e42419c87c83f92956d56f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.108486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36cf2c395e42419c87c83f92956d56f7 [ 759.352431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 7de30fe70fa84c1c8620e5bc22e8b947 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.360338] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7de30fe70fa84c1c8620e5bc22e8b947 [ 759.369945] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.412015] env[62109]: DEBUG nova.compute.manager [req-609f0676-fc57-482a-876e-5ca7f3a88566 req-f8d2241a-48bf-440f-b372-73ab00a9d54a service nova] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Received event network-vif-deleted-27d29d58-7775-4338-8153-6267d4a560a3 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 759.456893] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.457429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 8ae8b343633548ddad8ab75099914f0c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.465879] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ae8b343633548ddad8ab75099914f0c [ 759.562867] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Releasing lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.564057] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.564057] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 759.564057] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5fe5fb3-14d3-4346-883d-dfb7fe27f3b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.573819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4bb1c1-ce58-49a6-a90b-17f8080a0569 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.586961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 98114034110040ae84c05211ca75270b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.599396] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 90c50f92-c1ff-4ac9-a819-ae0083884e28 could not be found. [ 759.599620] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 759.599794] env[62109]: INFO nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Took 0.04 seconds to destroy the instance on the hypervisor. [ 759.600048] env[62109]: DEBUG oslo.service.loopingcall [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.600260] env[62109]: DEBUG nova.compute.manager [-] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.600522] env[62109]: DEBUG nova.network.neutron [-] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.614899] env[62109]: DEBUG nova.network.neutron [-] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.615468] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c6b70ba5453d4485a05a677d2eab1c6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.617270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98114034110040ae84c05211ca75270b [ 759.621823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6b70ba5453d4485a05a677d2eab1c6d [ 759.960828] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Releasing lock "refresh_cache-9f77d364-928f-4595-9253-8bb216b9215b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.961234] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 759.961526] env[62109]: DEBUG nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.961807] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.978088] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.978759] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 2ce72a80fb9849cb91e146767828ce8d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 759.986035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ce72a80fb9849cb91e146767828ce8d [ 760.118183] env[62109]: DEBUG nova.network.neutron [-] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.119215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 45f968bb17d049ed994ee223c2b6121e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.133386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45f968bb17d049ed994ee223c2b6121e [ 760.135225] env[62109]: INFO nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Deleted allocations for instance 27b21d98-bf4d-47ee-84e8-d5a3d10d87a4 [ 760.144992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg 0a7eb7c9ab50454881ef591ca8319f42 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.186113] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a7eb7c9ab50454881ef591ca8319f42 [ 760.242338] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacae571-98b1-4b0d-8784-ede3c71c3a9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.249811] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debc5ef6-db70-4a42-bee4-017a34f5b4d1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.283460] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4336b15-cc80-4604-9b77-b6d1a1086af6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.296605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da9742a-1aa3-4248-a762-d469c105828f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.317813] env[62109]: DEBUG nova.compute.provider_tree [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.318503] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] 
Expecting reply to msg 2cecc80287eb40269e75c88b7a4cfb6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.327017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cecc80287eb40269e75c88b7a4cfb6e [ 760.480737] env[62109]: DEBUG nova.network.neutron [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.481304] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d43acd20c70247f0adc008f06a7dbdbb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.491126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d43acd20c70247f0adc008f06a7dbdbb [ 760.624359] env[62109]: INFO nova.compute.manager [-] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Took 1.02 seconds to deallocate network for instance. [ 760.626636] env[62109]: DEBUG nova.compute.claims [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 760.626811] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.653530] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "27b21d98-bf4d-47ee-84e8-d5a3d10d87a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.611s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.654123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 9d4c88f849914d8599f9e8839bb9f3a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.666263] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d4c88f849914d8599f9e8839bb9f3a7 [ 760.821865] env[62109]: DEBUG nova.scheduler.client.report [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.825009] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg d10287308a6542a49a4446b2a602dccc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 760.841024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d10287308a6542a49a4446b2a602dccc [ 760.984097] env[62109]: INFO nova.compute.manager [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] [instance: 9f77d364-928f-4595-9253-8bb216b9215b] Took 1.02 seconds to deallocate network for instance. [ 760.986011] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg cdf59d96c61a4f6ebc66c4d31928eab4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.023144] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdf59d96c61a4f6ebc66c4d31928eab4 [ 761.155687] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 761.157483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg f59de2b51dd5498a9c4b3fa635a06858 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.193461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f59de2b51dd5498a9c4b3fa635a06858 [ 761.327989] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.328472] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.330296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg a3b744fc1cc24701aff363733b3bd6b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.331313] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.185s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.332081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 0557bc7cb8084dfd9f403ac343f435ed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.357402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0557bc7cb8084dfd9f403ac343f435ed [ 761.360857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3b744fc1cc24701aff363733b3bd6b6 [ 761.490898] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg cf6906a7e51f4eac8750768dcfae25ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.521694] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf6906a7e51f4eac8750768dcfae25ec [ 761.683890] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.835236] env[62109]: DEBUG nova.compute.utils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.835856] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 2dcb3e160ba54334818263988ff48505 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.840042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 4d1f3e25ef8f421ea2e8d8acd1804fd0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 761.840042] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.840042] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 761.847192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dcb3e160ba54334818263988ff48505 [ 761.849044] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d1f3e25ef8f421ea2e8d8acd1804fd0 [ 761.889384] env[62109]: DEBUG nova.policy [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394e29ffb261493197f3ef862213d774', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a940e96edf044a59d9882d228b2bb21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 762.014391] env[62109]: INFO nova.scheduler.client.report [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Deleted allocations for instance 9f77d364-928f-4595-9253-8bb216b9215b [ 762.020314] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Expecting reply to msg d010d1f145ad4312bce0e9217a99c8ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.032302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d010d1f145ad4312bce0e9217a99c8ef [ 762.241548] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Successfully created port: b2910613-b431-4ef8-9ab4-20a3b9638084 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.340962] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 762.342819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg ff3bdc48190f4ed5a6cf31dc910404b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.374809] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 832c9ce1-6344-485a-a9ef-6950d1c78ef9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.374965] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3d99c7df-b031-4187-988c-f642f79073d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.375105] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 87304cf6-e65f-41de-ab6f-d2170aaa9064 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.375198] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 90c50f92-c1ff-4ac9-a819-ae0083884e28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.375310] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance ae026dca-dc05-4710-8a03-4e792a0dc61d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.375896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 837b8acd403644d8951ea6d597daa1bd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.384415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3bdc48190f4ed5a6cf31dc910404b3 [ 762.385353] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 837b8acd403644d8951ea6d597daa1bd [ 762.523787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5174db81-624c-4ad9-99ed-ccc1b5918206 tempest-ListServersNegativeTestJSON-505733123 tempest-ListServersNegativeTestJSON-505733123-project-member] Lock "9f77d364-928f-4595-9253-8bb216b9215b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.442s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.523787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg d9fd29651e2342d9875262b80dff8728 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.533766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9fd29651e2342d9875262b80dff8728 [ 762.856990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 57bedf233b64499891522233ec740e47 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.878038] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 16b04a1b-0ab3-4386-a1eb-74ef3e46a553 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 762.878626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ed21315a044f4198840d109404ae3b7e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 762.888471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed21315a044f4198840d109404ae3b7e [ 762.903208] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57bedf233b64499891522233ec740e47 [ 763.013666] env[62109]: DEBUG nova.compute.manager [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Received event network-changed-b2910613-b431-4ef8-9ab4-20a3b9638084 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 763.013948] env[62109]: DEBUG nova.compute.manager [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Refreshing instance network info cache due to event network-changed-b2910613-b431-4ef8-9ab4-20a3b9638084. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 763.014083] env[62109]: DEBUG oslo_concurrency.lockutils [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] Acquiring lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.014272] env[62109]: DEBUG oslo_concurrency.lockutils [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] Acquired lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.014391] env[62109]: DEBUG nova.network.neutron [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Refreshing network info cache for port b2910613-b431-4ef8-9ab4-20a3b9638084 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 763.014816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] Expecting reply to msg 6a4cce540c1548ca858ee5638b13963d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 763.023980] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a4cce540c1548ca858ee5638b13963d [ 763.024827] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 763.026454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 10110862fb5a4105a4f5288c7b8cd459 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 763.057707] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10110862fb5a4105a4f5288c7b8cd459 [ 763.184136] env[62109]: ERROR nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. 
[ 763.184136] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.184136] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.184136] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.184136] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.184136] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.184136] env[62109]: ERROR nova.compute.manager raise self.value [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.184136] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.184136] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.184136] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.184604] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.184604] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.184604] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. 
[ 763.184604] env[62109]: ERROR nova.compute.manager [ 763.184604] env[62109]: Traceback (most recent call last): [ 763.184604] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.184604] env[62109]: listener.cb(fileno) [ 763.184604] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.184604] env[62109]: result = function(*args, **kwargs) [ 763.184604] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.184604] env[62109]: return func(*args, **kwargs) [ 763.184604] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.184604] env[62109]: raise e [ 763.184604] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.184604] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 763.184604] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.184604] env[62109]: created_port_ids = self._update_ports_for_instance( [ 763.184604] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.184604] env[62109]: with excutils.save_and_reraise_exception(): [ 763.184604] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.184604] env[62109]: self.force_reraise() [ 763.184604] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.184604] env[62109]: raise self.value [ 763.184604] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.184604] env[62109]: updated_port = self._update_port( [ 763.184604] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.184604] env[62109]: _ensure_no_port_binding_failure(port) [ 763.184604] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.184604] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.185438] env[62109]: nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. [ 763.185438] env[62109]: Removing descriptor: 16 [ 763.360517] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 763.381334] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 08638aac-2c6c-4580-9894-6b3b3c1ec484 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.381897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg fed742feb33b4ed4972ca9a9cbd33e91 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 763.387947] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.388200] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.388358] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.388635] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.388680] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.389730] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.389730] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.389730] env[62109]: DEBUG 
nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.389730] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.389730] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.389904] env[62109]: DEBUG nova.virt.hardware [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.390463] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9143f25e-6be5-423c-aff9-04ca5731898c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.398388] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3433bc9e-0284-4ce0-9c9a-959ede3b1e2c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.402895] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fed742feb33b4ed4972ca9a9cbd33e91 [ 763.413387] env[62109]: ERROR nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. 
[ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Traceback (most recent call last): [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] yield resources [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.driver.spawn(context, instance, image_meta, [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] vm_ref = self.build_virtual_machine(instance, [ 763.413387] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] for vif in network_info: [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return self._sync_wrapper(fn, *args, **kwargs) [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.wait() [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self[:] = self._gt.wait() [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return self._exit_event.wait() [ 763.413727] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 763.413727] env[62109]: ERROR 
nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] current.throw(*self._exc) [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] result = function(*args, **kwargs) [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return func(*args, **kwargs) [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise e [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] nwinfo = self.network_api.allocate_for_instance( [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] created_port_ids = self._update_ports_for_instance( [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] with excutils.save_and_reraise_exception(): [ 763.414091] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.force_reraise() [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise self.value [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] updated_port = self._update_port( [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] _ensure_no_port_binding_failure(port) [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise exception.PortBindingFailed(port_id=port['id']) [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. [ 763.414544] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] [ 763.414544] env[62109]: INFO nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Terminating instance [ 763.415493] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquiring lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.534939] env[62109]: DEBUG nova.network.neutron [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.557073] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.623057] env[62109]: DEBUG nova.network.neutron [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.623626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] Expecting reply to msg 57f2b303b0ac4705b5036381b94e492d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 763.633207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57f2b303b0ac4705b5036381b94e492d [ 763.884918] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 66a0a424-ecb6-43df-9b47-946ff1e1b7b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.885516] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 82176f06921f45f49dc0de93893ff0a6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 763.898397] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82176f06921f45f49dc0de93893ff0a6 [ 764.125823] env[62109]: DEBUG oslo_concurrency.lockutils [req-dd199438-4034-4925-96dd-93f24290e1f9 req-c636266d-e837-4348-9e87-b8ff91907e63 service nova] Releasing lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.126291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquired lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.126476] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.126915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg d703983bad664f3ca40285bd6d60e387 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 764.133429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d703983bad664f3ca40285bd6d60e387 [ 764.387612] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.388249] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1ab81539171b4c2fb5e062f984a8cfe6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 764.402206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ab81539171b4c2fb5e062f984a8cfe6 [ 764.650507] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.731597] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.732159] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 36752d9ec3bb495a921e70d1f8809ab7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 764.740785] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36752d9ec3bb495a921e70d1f8809ab7 [ 764.890733] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.891316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 26db84d599934ee7b8859f11bc555c1f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 764.901288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26db84d599934ee7b8859f11bc555c1f [ 765.044570] env[62109]: DEBUG nova.compute.manager [req-167ce501-203a-40c6-a4d6-a60f341d95f4 req-a88ae835-3186-4323-854b-dc6352beabe4 service nova] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Received event network-vif-deleted-b2910613-b431-4ef8-9ab4-20a3b9638084 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 765.234650] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Releasing lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.235152] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 765.235398] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 765.235733] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acf9e3b7-f0ef-4c70-9980-b3cffeb97316 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.244342] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6516ce2-dbb6-4e20-945d-a58ce0fc96fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.270255] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae026dca-dc05-4710-8a03-4e792a0dc61d could not be found. [ 765.270255] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 765.270255] env[62109]: INFO nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 765.270255] env[62109]: DEBUG oslo.service.loopingcall [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.270255] env[62109]: DEBUG nova.compute.manager [-] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.270255] env[62109]: DEBUG nova.network.neutron [-] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 765.284334] env[62109]: DEBUG nova.network.neutron [-] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.284334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fab96fb88b87459783027a6383cb3a39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 765.295005] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fab96fb88b87459783027a6383cb3a39 [ 765.393934] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a6ec5486-0843-4c38-b187-35d5296965a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.394607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 16015ff997574bf6a3d13187581535ff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 765.404856] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16015ff997574bf6a3d13187581535ff [ 765.785530] env[62109]: DEBUG nova.network.neutron [-] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.786209] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6823ac84c2314895b689ed10603fde44 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 765.794131] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6823ac84c2314895b689ed10603fde44 [ 765.897611] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance c7ec619c-1b00-4d58-a593-671c0139c4e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.898290] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg d7c6453dbda449be8a1a755c60a6ac15 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 765.908354] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7c6453dbda449be8a1a755c60a6ac15 [ 766.288059] env[62109]: INFO nova.compute.manager [-] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Took 1.02 seconds to deallocate network for instance. 
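As an aside to the PortBindingFailed tracebacks above: the failure originates in the port check named in the trace (nova/network/neutron.py, _ensure_no_port_binding_failure). A minimal, self-contained sketch of that check follows; it assumes Neutron's usual convention of marking a failed binding via 'binding:vif_type' = 'binding_failed', which the log itself does not show, and the class below is only a stand-in for nova.exception.PortBindingFailed.

# Illustrative sketch only, not copied from Nova or from the log above.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel value


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # If Neutron could not bind the port to any host, surface a hard failure
    # so the compute manager aborts the spawn, as seen in the traceback.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    failed_port = {'id': 'b2910613-b431-4ef8-9ab4-20a3b9638084',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # reproduces the error message logged above
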
[ 766.290458] env[62109]: DEBUG nova.compute.claims [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 766.290648] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.400829] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a04d014e-bed6-4e4b-a5eb-316d88c174f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.401327] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 6a18361d75c6467db0c1b10da37b6cf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 766.411410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a18361d75c6467db0c1b10da37b6cf7 [ 766.903630] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance b95c60dc-50c4-4afc-acb0-3308e490b808 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.904230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg dfcab6d475374cd3bda29825d72c14ea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 766.915130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfcab6d475374cd3bda29825d72c14ea [ 767.406782] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 900e1e1e-5635-4782-bd87-046dd2af7dad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.407409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 55d79da35f14456b973ab10ea87cc0b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 767.417784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55d79da35f14456b973ab10ea87cc0b3 [ 767.910049] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance afc5587e-7fd5-4b07-aff8-98ef8358985f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.910714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 64f9841cd4c0435c8a5e058b4fbf3ae5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 767.922728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64f9841cd4c0435c8a5e058b4fbf3ae5 [ 768.413667] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.414334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ac976d8fea3445749b114579fd4dfcd1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 768.424575] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac976d8fea3445749b114579fd4dfcd1 [ 768.916752] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance aa1afca5-8194-4a9d-bcd0-e3e91c15338c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.917577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 379df72b1f9f45f89107642349e8a184 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 768.927676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379df72b1f9f45f89107642349e8a184 [ 769.420484] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 732cf1e3-823d-4769-ad16-f5b492be53d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.421231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1e2ab150e0a340d3b364310db9c9482b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 769.431289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e2ab150e0a340d3b364310db9c9482b [ 769.923431] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 436788b9-92bb-4088-9c24-c2e9a073c09d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.923961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 9c7d88a298574d179f3d813b9bdf9339 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 769.933777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c7d88a298574d179f3d813b9bdf9339 [ 770.426257] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 53d6d89d-04bb-421d-994c-014830491dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.426831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg de8162e2e5794037beb6704ba1fb5dab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 770.436722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de8162e2e5794037beb6704ba1fb5dab [ 770.931137] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3ada5090-7219-4835-b508-2188501ae5e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.931137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 82acdf86bfea4111a97aadc9c701cd45 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 770.940583] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82acdf86bfea4111a97aadc9c701cd45 [ 771.433382] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 6163fcd4-cfe4-4432-ba8d-665319fa11ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.434048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 09ac8cad0fc5460b8b93e23ee5eebb5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 771.446304] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09ac8cad0fc5460b8b93e23ee5eebb5f [ 771.936850] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 252b7e84-4f91-4078-a81c-392d622b6ce2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.937450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 44dd4dfd7ef84893b741e7d7c96576a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 771.948545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44dd4dfd7ef84893b741e7d7c96576a9 [ 772.439462] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.439775] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 772.439860] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 772.768855] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95bc06a-af7f-472c-bdad-0f1201965f56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.776774] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf12886-350f-4ab6-bf2e-9d57e72221be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.805601] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc31cc1-61ae-404a-8427-0345e9fe0037 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.813343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d81c22e-32ce-4591-aba7-b45440588a4d {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.827484] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.828023] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg a0ee0cdf4a5f4719ac19811baca5c2e3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 772.834637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0ee0cdf4a5f4719ac19811baca5c2e3 [ 773.331029] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 773.333490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 16075c0d056045e4a28b6bbe0ce01eee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 773.348992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16075c0d056045e4a28b6bbe0ce01eee [ 773.836388] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 773.836703] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.505s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.836909] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.140s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.838804] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d2d07cb6bd9b4c6dbc2af23a9ea52580 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 773.884769] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2d07cb6bd9b4c6dbc2af23a9ea52580 [ 774.647247] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9b6834-2a8d-412c-a989-a959e08d04bb {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.654613] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955b9d80-58f2-4c80-a3ca-0fa82c1651b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.684685] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c1f82c-a122-4d11-9915-bde91543a8a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.691479] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81d0d7b-d033-45dd-80a0-68487f6749f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.703931] env[62109]: DEBUG nova.compute.provider_tree [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.704571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg fbd3afc0ddf84124bd4a35216d34ac95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 774.712206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbd3afc0ddf84124bd4a35216d34ac95 [ 775.210027] env[62109]: DEBUG nova.scheduler.client.report [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.212427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 76d43716b8c84447ad0fb038871a10db in queue reply_7522b64acfeb4981b1f36928b040d568 [ 775.223964] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d43716b8c84447ad0fb038871a10db [ 775.715449] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.878s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.716101] env[62109]: ERROR nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 
832c9ce1-6344-485a-a9ef-6950d1c78ef9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Traceback (most recent call last): [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.driver.spawn(context, instance, image_meta, [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] vm_ref = self.build_virtual_machine(instance, [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] vif_infos = vmwarevif.get_vif_info(self._session, [ 775.716101] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] for vif in network_info: [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return self._sync_wrapper(fn, *args, **kwargs) [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.wait() [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self[:] = self._gt.wait() [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return self._exit_event.wait() [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] 
current.throw(*self._exc) [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.716544] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] result = function(*args, **kwargs) [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] return func(*args, **kwargs) [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise e [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] nwinfo = self.network_api.allocate_for_instance( [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] created_port_ids = self._update_ports_for_instance( [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] with excutils.save_and_reraise_exception(): [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] self.force_reraise() [ 775.716955] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise self.value [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] updated_port = self._update_port( [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] _ensure_no_port_binding_failure(port) [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 
832c9ce1-6344-485a-a9ef-6950d1c78ef9] raise exception.PortBindingFailed(port_id=port['id']) [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] nova.exception.PortBindingFailed: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. [ 775.717369] env[62109]: ERROR nova.compute.manager [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] [ 775.717369] env[62109]: DEBUG nova.compute.utils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 775.718140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.475s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.719548] env[62109]: INFO nova.compute.claims [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.721237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 8ad28f303c2f4519930ddf7b6e21b375 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 775.722394] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Build of instance 832c9ce1-6344-485a-a9ef-6950d1c78ef9 was re-scheduled: Binding failed for port bd3d8b29-3381-4851-99c3-877677b6056f, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 775.722791] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 775.723009] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.723182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.723348] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 775.723700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg bf2a07c86bec47869b241bd3ecb59f97 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 775.733090] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf2a07c86bec47869b241bd3ecb59f97 [ 775.752094] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ad28f303c2f4519930ddf7b6e21b375 [ 776.005048] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.005273] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.005941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8cb12d76fe1a47abb8235acd31bd9159 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 776.018452] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cb12d76fe1a47abb8235acd31bd9159 [ 776.226854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 4f99a3ee026a4b29b2621618e898169e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 776.237230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f99a3ee026a4b29b2621618e898169e 
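For context on the inventory data logged above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e: a minimal sketch of how those totals translate into schedulable capacity, assuming the standard Placement rule capacity = (total - reserved) * allocation_ratio (the rule itself is not stated in the log). This is why 48 physical vCPUs appear as far more schedulable VCPU units.

# Illustrative sketch only; values are taken from the inventory dict in the log.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def schedulable_capacity(inv):
    # Effective units the scheduler can allocate per resource class,
    # under the assumed (total - reserved) * allocation_ratio rule.
    return {rc: int((spec['total'] - spec['reserved']) * spec['allocation_ratio'])
            for rc, spec in inv.items()}


print(schedulable_capacity(inventory))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
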
[ 776.244031] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.339347] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.339904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 26dc8fe5fe8b464ab6b0739f9e28408f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 776.348282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26dc8fe5fe8b464ab6b0739f9e28408f [ 776.511077] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.511077] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 776.511240] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 776.511681] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 26245eaf40c04f6fb0d75cb8e342ea05 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 776.524610] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26245eaf40c04f6fb0d75cb8e342ea05 [ 776.842315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-832c9ce1-6344-485a-a9ef-6950d1c78ef9" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.842315] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 776.842478] env[62109]: DEBUG nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.842658] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 776.866727] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.867682] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 3930900982bd44e2a6c7d0398596590f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 776.875065] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3930900982bd44e2a6c7d0398596590f [ 776.933400] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquiring lock "f453b695-8abd-44fa-8468-75c6aaeec19a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.933631] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "f453b695-8abd-44fa-8468-75c6aaeec19a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.015185] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 777.015358] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 777.015487] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 777.015611] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 777.015732] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 777.015874] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 777.016671] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fee6101-1071-4a05-9927-3dfc3dbc87d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.019124] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.019300] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.019824] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.019989] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.020158] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.020336] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.020456] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 777.020551] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 777.020904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 55376f9b8a4d474f9ae73f8eb42ede1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 777.024347] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324a5186-6f14-48ed-af19-8180c43a1829 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.057816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55376f9b8a4d474f9ae73f8eb42ede1b [ 777.058790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c77117e-89d8-4904-a79a-2e0be150ac7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.066568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d22ec4-ac31-47b9-9348-1dd2c15ab480 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.080409] env[62109]: DEBUG nova.compute.provider_tree [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.081090] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 0aa8f7d8032847de9a70e7af4449140a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 777.092905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aa8f7d8032847de9a70e7af4449140a [ 777.371342] env[62109]: DEBUG nova.network.neutron [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.371342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 78608cd72a8c4cedb2ce632dabefdfb5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 777.379193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78608cd72a8c4cedb2ce632dabefdfb5 [ 777.525255] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
777.589083] env[62109]: DEBUG nova.scheduler.client.report [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.591510] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 2e9e380bbd604f7e98a006d1ba47772b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 777.604887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e9e380bbd604f7e98a006d1ba47772b [ 777.874994] env[62109]: INFO nova.compute.manager [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 832c9ce1-6344-485a-a9ef-6950d1c78ef9] Took 1.03 seconds to deallocate network for instance. [ 777.874994] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 23fc872543044222acb99bb60438aec8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 777.906147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23fc872543044222acb99bb60438aec8 [ 778.095063] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.095436] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 778.097137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg f6452050c67d4b9eaa3e9d758d94fe55 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 778.098856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.881s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.100755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 393a750eab5944f59a3f4fe906928508 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 778.130380] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6452050c67d4b9eaa3e9d758d94fe55 [ 778.133763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 393a750eab5944f59a3f4fe906928508 [ 778.379790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 8e30360c38964df4b264de197541e157 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 778.409163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e30360c38964df4b264de197541e157 [ 778.600079] env[62109]: DEBUG nova.compute.utils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 778.600722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 2088c009bffd4c929df5d22ba5aec90d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 778.602146] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 778.602511] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 778.612551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2088c009bffd4c929df5d22ba5aec90d [ 778.670527] env[62109]: DEBUG nova.policy [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '332259bb7d2841e385dc46a558da2fd9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5eead97e11a4688b0058d50d5bef6ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 778.905203] env[62109]: INFO nova.scheduler.client.report [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance 832c9ce1-6344-485a-a9ef-6950d1c78ef9 [ 778.913376] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 4deec9f129af4ad495f36ed27a65e00a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 778.942249] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4deec9f129af4ad495f36ed27a65e00a [ 778.967149] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5df4468-d264-416c-ab43-793dcd78aea5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.976033] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eea80da-1bf2-4ae2-b129-2ab2be013b45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.008257] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Successfully created port: f735d74f-e61d-4468-b208-318e406dcc17 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.011280] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea76a9d-14d1-4c11-bdb4-d4a5374735dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.020784] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d43b04-6f64-4bb0-bd44-5dce01646abc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.034080] env[62109]: DEBUG nova.compute.provider_tree 
[None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.034595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 994afa02ff3548668f79b4a05ea44d0d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.041293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 994afa02ff3548668f79b4a05ea44d0d [ 779.103023] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 779.104764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 5d8f8e630310405abf74b214564878c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.137691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d8f8e630310405abf74b214564878c0 [ 779.414978] env[62109]: DEBUG oslo_concurrency.lockutils [None req-95852330-b22c-4e57-b3fc-19c0b2a26db1 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "832c9ce1-6344-485a-a9ef-6950d1c78ef9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.731s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.415604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg a6ab322903fc4222bf3e135e81543228 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.432654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6ab322903fc4222bf3e135e81543228 [ 779.537410] env[62109]: DEBUG nova.scheduler.client.report [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.540193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 
0c8e2c7bfc64411d99d04f505a7178cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.554192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c8e2c7bfc64411d99d04f505a7178cc [ 779.611140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg ed97f835e79346aba31f346e1a2f7b8c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.647003] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed97f835e79346aba31f346e1a2f7b8c [ 779.917993] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 779.919742] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg c67329d776214c1f81d95c5268b96eba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 779.952755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c67329d776214c1f81d95c5268b96eba [ 780.044350] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.945s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.044985] env[62109]: ERROR nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. 
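The PortBindingFailed error above, and the traceback that follows it, originate in the _ensure_no_port_binding_failure() helper visible at the bottom of that traceback: Nova rejects any Neutron port whose VIF binding came back as failed. A minimal, hypothetical sketch of that check (simplified names and a local exception class, not Nova's actual source):

# Simplified, hypothetical sketch of the port-binding check seen in the
# traceback below (nova/network/neutron.py, _ensure_no_port_binding_failure).
# Not Nova's code; the exception class and constant are local stand-ins.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


VIF_TYPE_BINDING_FAILED = "binding_failed"  # value Neutron reports for a failed binding


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's VIF binding as failed."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])


if __name__ == "__main__":
    bad_port = {"id": "d19cd207-7c44-4737-9b63-dd316aa946f5",
                "binding:vif_type": VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)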
[ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Traceback (most recent call last): [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.driver.spawn(context, instance, image_meta, [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] vm_ref = self.build_virtual_machine(instance, [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.044985] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] for vif in network_info: [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return self._sync_wrapper(fn, *args, **kwargs) [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.wait() [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self[:] = self._gt.wait() [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return self._exit_event.wait() [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] current.throw(*self._exc) [ 780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
780.045319] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] result = function(*args, **kwargs) [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] return func(*args, **kwargs) [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise e [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] nwinfo = self.network_api.allocate_for_instance( [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] created_port_ids = self._update_ports_for_instance( [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] with excutils.save_and_reraise_exception(): [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] self.force_reraise() [ 780.045671] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise self.value [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] updated_port = self._update_port( [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] _ensure_no_port_binding_failure(port) [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] raise exception.PortBindingFailed(port_id=port['id']) [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] nova.exception.PortBindingFailed: Binding failed for 
port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. [ 780.046039] env[62109]: ERROR nova.compute.manager [instance: 3d99c7df-b031-4187-988c-f642f79073d3] [ 780.046039] env[62109]: DEBUG nova.compute.utils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 780.046921] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.303s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.048483] env[62109]: INFO nova.compute.claims [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.050156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 2a315203952b441389cadb424e8032ee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.058396] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Build of instance 3d99c7df-b031-4187-988c-f642f79073d3 was re-scheduled: Binding failed for port d19cd207-7c44-4737-9b63-dd316aa946f5, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 780.058396] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 780.058396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquiring lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.058396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Acquired lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.058836] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 780.058836] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 7c0cf769647a4625b25d14586793dc52 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.059592] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c0cf769647a4625b25d14586793dc52 [ 780.068718] env[62109]: DEBUG nova.compute.manager [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Received event network-changed-f735d74f-e61d-4468-b208-318e406dcc17 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 780.068878] env[62109]: DEBUG nova.compute.manager [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Refreshing instance network info cache due to event network-changed-f735d74f-e61d-4468-b208-318e406dcc17. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 780.069135] env[62109]: DEBUG oslo_concurrency.lockutils [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] Acquiring lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.069276] env[62109]: DEBUG oslo_concurrency.lockutils [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] Acquired lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.069435] env[62109]: DEBUG nova.network.neutron [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Refreshing network info cache for port f735d74f-e61d-4468-b208-318e406dcc17 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 780.069915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] Expecting reply to msg ad2ed5b6314f4175bf734329d65a140d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.084403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad2ed5b6314f4175bf734329d65a140d [ 780.089967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a315203952b441389cadb424e8032ee [ 780.114133] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 780.134973] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:15:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='f6cf26b4-f874-44a7-b08a-ce8c51570f46',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-659875463',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.134973] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.135101] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.135243] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.135381] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.135520] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.135718] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.135866] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.136063] env[62109]: DEBUG nova.virt.hardware [None 
req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.136227] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.136389] env[62109]: DEBUG nova.virt.hardware [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.137550] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f9a55f-d16d-470c-82fd-a0b4f85e25a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.146043] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80a1d46-a0e9-45d8-bb94-2bfbb667b9ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.151958] env[62109]: ERROR nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. 
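The nova.virt.hardware DEBUG lines above walk from the flavor/image limits (65536 sockets, cores, and threads) down to a single possible topology for this 1-vCPU flavor. An illustrative sketch of that enumeration, not Nova's actual implementation: every sockets x cores x threads factorization of the vCPU count that stays within the per-dimension maxima is a candidate, which for vcpus=1 leaves only 1:1:1.

# Illustrative sketch only (not Nova's _get_possible_cpu_topologies): list all
# sockets x cores x threads factorizations of the vCPU count within the maxima.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


print(possible_topologies(1))  # only VirtCPUTopology(sockets=1, cores=1, threads=1)
print(possible_topologies(4))  # 1x1x4, 1x2x2, 1x4x1, 2x1x2, 2x2x1, 4x1x1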
[ 780.151958] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.151958] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.151958] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.151958] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.151958] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.151958] env[62109]: ERROR nova.compute.manager raise self.value [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.151958] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 780.151958] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.151958] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 780.152505] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.152505] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 780.152505] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. 
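The traceback above re-raises through oslo_utils' save_and_reraise_exception(), visible in the excutils.py frames. A minimal usage sketch of that context manager, with a hypothetical cleanup callback: it lets error-handling code run and then force-reraises the original exception, which is exactly the force_reraise()/raise self.value pair in those frames.

# Usage sketch of oslo_utils.excutils.save_and_reraise_exception(); the
# update/cleanup callables here are hypothetical, not a real Nova code path.
from oslo_utils import excutils


def update_port_or_cleanup(update_port, cleanup, port_id):
    try:
        return update_port(port_id)
    except Exception:
        # Run cleanup, then re-raise the original error when the block exits.
        with excutils.save_and_reraise_exception():
            cleanup(port_id)


if __name__ == "__main__":
    def failing_update(port_id):
        raise RuntimeError("binding failed for %s" % port_id)

    try:
        update_port_or_cleanup(failing_update,
                               lambda pid: print("cleaned up", pid),
                               "f735d74f-e61d-4468-b208-318e406dcc17")
    except RuntimeError as exc:
        print("re-raised:", exc)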
[ 780.152505] env[62109]: ERROR nova.compute.manager [ 780.152505] env[62109]: Traceback (most recent call last): [ 780.152505] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 780.152505] env[62109]: listener.cb(fileno) [ 780.152505] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.152505] env[62109]: result = function(*args, **kwargs) [ 780.152505] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.152505] env[62109]: return func(*args, **kwargs) [ 780.152505] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.152505] env[62109]: raise e [ 780.152505] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.152505] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 780.152505] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.152505] env[62109]: created_port_ids = self._update_ports_for_instance( [ 780.152505] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.152505] env[62109]: with excutils.save_and_reraise_exception(): [ 780.152505] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.152505] env[62109]: self.force_reraise() [ 780.152505] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.152505] env[62109]: raise self.value [ 780.152505] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.152505] env[62109]: updated_port = self._update_port( [ 780.152505] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.152505] env[62109]: _ensure_no_port_binding_failure(port) [ 780.152505] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.152505] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 780.153377] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. [ 780.153377] env[62109]: Removing descriptor: 16 [ 780.161265] env[62109]: ERROR nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. 
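The bare eventlet traceback just above (hubs/poll.py, greenthread.py, _allocate_network_async) and the spawn traceback that follows show the same failure twice because Nova allocates networking on a separate greenthread and the exception only surfaces when the spawn path waits on the result. A small sketch of that deferred-failure pattern, assuming eventlet is installed (the allocation function here is illustrative, not Nova's code):

# Deferred-failure sketch: the error raised on the allocation greenthread is
# re-raised in the caller only when it waits on the greenthread's result.
import eventlet


def allocate_network_async(port_id):
    # Stand-in for nova's _allocate_network_async failing on a bad binding.
    raise RuntimeError("Binding failed for port %s" % port_id)


gt = eventlet.spawn(allocate_network_async,
                    "f735d74f-e61d-4468-b208-318e406dcc17")

try:
    network_info = gt.wait()  # exception from the greenthread surfaces here
except RuntimeError as exc:
    print("surfaced while building the VM:", exc)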
[ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Traceback (most recent call last): [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] yield resources [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.driver.spawn(context, instance, image_meta, [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] vm_ref = self.build_virtual_machine(instance, [ 780.161265] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] for vif in network_info: [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return self._sync_wrapper(fn, *args, **kwargs) [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.wait() [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self[:] = self._gt.wait() [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return self._exit_event.wait() [ 780.161699] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 780.161699] env[62109]: ERROR 
nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] current.throw(*self._exc) [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] result = function(*args, **kwargs) [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return func(*args, **kwargs) [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise e [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] nwinfo = self.network_api.allocate_for_instance( [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] created_port_ids = self._update_ports_for_instance( [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] with excutils.save_and_reraise_exception(): [ 780.162115] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.force_reraise() [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise self.value [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] updated_port = self._update_port( [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] _ensure_no_port_binding_failure(port) [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise exception.PortBindingFailed(port_id=port['id']) [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. [ 780.162525] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] [ 780.162525] env[62109]: INFO nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Terminating instance [ 780.163687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.335432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.335678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.439920] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.555535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 9d3c31f34bf742bda6fd6f42efdfe0ea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.563376] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d3c31f34bf742bda6fd6f42efdfe0ea [ 780.574630] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.592399] env[62109]: DEBUG nova.network.neutron [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.636978] env[62109]: DEBUG nova.network.neutron [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.637492] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] Expecting reply to msg 6984018d38ea4beea11ac14b69dbce58 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.646042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6984018d38ea4beea11ac14b69dbce58 [ 780.672136] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.672670] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg e7090270b8e542c8876d26b66f5b3448 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 780.680841] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7090270b8e542c8876d26b66f5b3448 [ 781.139897] env[62109]: DEBUG oslo_concurrency.lockutils [req-e338758d-8c42-485a-a1dd-2b9889eb2ae3 req-aa3269cc-c0a0-4e02-b17a-44a0b01ac876 service nova] Releasing lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.140342] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquired lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.140528] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 781.140975] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 81bb5af55f8748c485d1981a39bcec8e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.148200] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81bb5af55f8748c485d1981a39bcec8e [ 781.174848] env[62109]: 
DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Releasing lock "refresh_cache-3d99c7df-b031-4187-988c-f642f79073d3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.175103] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 781.175285] env[62109]: DEBUG nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.175460] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.191920] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.192540] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 06a55e08dacf45b697884f4a20369c50 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.207772] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06a55e08dacf45b697884f4a20369c50 [ 781.406249] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490b7bf6-f476-4088-96ad-cf422241b37f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.413612] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc69a2e-447d-4740-9943-c556d2621315 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.443867] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9cb8a9-487c-4d8b-ada0-3c0307eb0094 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.451124] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e13bbb0-fb9f-4ab3-83de-2b99e4d08d5f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.464864] env[62109]: DEBUG nova.compute.provider_tree [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.465387] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 78d315b2ed384f849c2f1b247cd33a03 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.472172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78d315b2ed384f849c2f1b247cd33a03 [ 781.659633] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.694768] env[62109]: DEBUG nova.network.neutron [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 3d99c7df-b031-4187-988c-f642f79073d3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.695453] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg a33afc69ef7d467ea5679c26ebcbb7af in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.704196] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a33afc69ef7d467ea5679c26ebcbb7af [ 781.715429] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.716046] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg f4c5de520b1d4dc1b423b8dbee6522ea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.725853] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4c5de520b1d4dc1b423b8dbee6522ea [ 781.967896] env[62109]: DEBUG nova.scheduler.client.report [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.970679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg c27e6327aca34bb5b411efb277bc154c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 781.983471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c27e6327aca34bb5b411efb277bc154c [ 782.095673] env[62109]: DEBUG nova.compute.manager [req-70641dd0-1369-48b5-bb5e-3de891214920 req-d611d762-c83a-4bb0-8f4f-b5064bdbdf1d service nova] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Received event network-vif-deleted-f735d74f-e61d-4468-b208-318e406dcc17 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 782.198108] env[62109]: INFO nova.compute.manager [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] [instance: 
3d99c7df-b031-4187-988c-f642f79073d3] Took 1.02 seconds to deallocate network for instance. [ 782.200036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg e40602d2456b42a691ecdde14befb7aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.226278] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Releasing lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.226915] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 782.227239] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 782.227891] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-322e0630-287e-4b6a-baad-66ec746a6564 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.237745] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ccac16-21c4-449a-ab3f-0f8940275f3a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.248337] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e40602d2456b42a691ecdde14befb7aa [ 782.261778] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16b04a1b-0ab3-4386-a1eb-74ef3e46a553 could not be found. [ 782.262172] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 782.262482] env[62109]: INFO nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Took 0.04 seconds to destroy the instance on the hypervisor. [ 782.262925] env[62109]: DEBUG oslo.service.loopingcall [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.263264] env[62109]: DEBUG nova.compute.manager [-] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 782.263479] env[62109]: DEBUG nova.network.neutron [-] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 782.278446] env[62109]: DEBUG nova.network.neutron [-] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.279056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c226ad6750344632bbb86eec70ccf3eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.285610] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c226ad6750344632bbb86eec70ccf3eb [ 782.474324] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.474944] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 782.476913] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg dae98fa957a24a97bb7ee96839ff2a8e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.477975] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.822s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.479464] env[62109]: INFO nova.compute.claims [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.480924] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 61bb37b6d8f944a3935089f00d910e5b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.512222] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dae98fa957a24a97bb7ee96839ff2a8e [ 782.528581] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61bb37b6d8f944a3935089f00d910e5b [ 782.704270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg 30b36e53ec3b4bb98c26cd166345988f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.736205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30b36e53ec3b4bb98c26cd166345988f [ 782.780852] env[62109]: DEBUG nova.network.neutron [-] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.781307] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c22a8a722e2c4685ac815f64dd315a83 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.789425] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c22a8a722e2c4685ac815f64dd315a83 [ 782.984107] env[62109]: DEBUG nova.compute.utils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.984777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 8c8e38d13001483a93945cd972b1faf8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.986176] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] 
[instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 782.986224] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 782.989156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg c7cf60bfa3db4e4bbe7288b1dac84ecd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 782.997723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7cf60bfa3db4e4bbe7288b1dac84ecd [ 782.998838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c8e38d13001483a93945cd972b1faf8 [ 783.176890] env[62109]: DEBUG nova.policy [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0933891c54584b059f68770a8c930f1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d361e27776e47eeadaa4a29b4f9338f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 783.226372] env[62109]: INFO nova.scheduler.client.report [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Deleted allocations for instance 3d99c7df-b031-4187-988c-f642f79073d3 [ 783.232350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Expecting reply to msg ea4137a203f74d1a9d9e236aa0de632c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 783.256539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4137a203f74d1a9d9e236aa0de632c [ 783.284254] env[62109]: INFO nova.compute.manager [-] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Took 1.02 seconds to deallocate network for instance. 
[ 783.286547] env[62109]: DEBUG nova.compute.claims [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 783.286719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.495410] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 783.497672] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg a3ead0548c5046f0ba33cc9c9684f12a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 783.517454] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Successfully created port: ce1aee7f-24df-45b4-acff-b0bbb0657cbe {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.536418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3ead0548c5046f0ba33cc9c9684f12a [ 783.734190] env[62109]: DEBUG oslo_concurrency.lockutils [None req-bd7c0011-a90a-453e-881d-fe8dbfc17285 tempest-ServersV294TestFqdnHostnames-815487066 tempest-ServersV294TestFqdnHostnames-815487066-project-member] Lock "3d99c7df-b031-4187-988c-f642f79073d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.495s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.734765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 73bc6276b6fe4851bcb285fda927f4b4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 783.743617] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73bc6276b6fe4851bcb285fda927f4b4 [ 783.824644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198ea400-a411-4f7b-9037-c17890a5ebcc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.833301] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eda9608-8397-47d9-a815-6af2285465e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.864550] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9fec9f83-a4d4-47f8-8d3e-f030e09389f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.872264] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7607865f-bebd-45a2-b3eb-be0e18daa3cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.885526] env[62109]: DEBUG nova.compute.provider_tree [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.886068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 934e7e0d4fc74b51b2d91e8b78148aef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 783.893601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 934e7e0d4fc74b51b2d91e8b78148aef [ 784.002849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg d602cacab52648d5a2b23dd7fe218a7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.047538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d602cacab52648d5a2b23dd7fe218a7b [ 784.237158] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 784.244967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 0d66db54877449e7a4204e6a04199c37 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.282810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d66db54877449e7a4204e6a04199c37 [ 784.388937] env[62109]: DEBUG nova.scheduler.client.report [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.391366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg c73329a7e86842059977c55d967e0d04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.403505] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c73329a7e86842059977c55d967e0d04 [ 784.506273] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 784.530211] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.530460] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.530611] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.530789] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.530934] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.531078] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.531281] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.531437] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.531597] env[62109]: DEBUG nova.virt.hardware [None 
req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.531760] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.531931] env[62109]: DEBUG nova.virt.hardware [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.532800] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6e406d-af3c-4324-87b4-9d7c3a602c59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.541135] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb74e29-b6f7-442c-b7ef-548bf2f917b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.556055] env[62109]: DEBUG nova.compute.manager [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Received event network-changed-ce1aee7f-24df-45b4-acff-b0bbb0657cbe {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 784.556251] env[62109]: DEBUG nova.compute.manager [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Refreshing instance network info cache due to event network-changed-ce1aee7f-24df-45b4-acff-b0bbb0657cbe. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 784.556459] env[62109]: DEBUG oslo_concurrency.lockutils [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] Acquiring lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.556598] env[62109]: DEBUG oslo_concurrency.lockutils [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] Acquired lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.556751] env[62109]: DEBUG nova.network.neutron [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Refreshing network info cache for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 784.557157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] Expecting reply to msg 229e309bb8bb4a2587d05f19df228aad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.563696] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 229e309bb8bb4a2587d05f19df228aad [ 784.759795] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.893779] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.894313] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.896437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg c1107ab0dbd546198975ae4f42ee1cd9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.897506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.394s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.899067] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg b4ef52a75a4c4b0296954f5b2ef093e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 784.936604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4ef52a75a4c4b0296954f5b2ef093e6 [ 784.937142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1107ab0dbd546198975ae4f42ee1cd9 [ 784.957241] env[62109]: ERROR nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. 
[ 784.957241] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.957241] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.957241] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.957241] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.957241] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.957241] env[62109]: ERROR nova.compute.manager raise self.value [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.957241] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 784.957241] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.957241] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 784.957796] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.957796] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 784.957796] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. 
[ 784.957796] env[62109]: ERROR nova.compute.manager [ 784.957796] env[62109]: Traceback (most recent call last): [ 784.957796] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 784.957796] env[62109]: listener.cb(fileno) [ 784.957796] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.957796] env[62109]: result = function(*args, **kwargs) [ 784.957796] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 784.957796] env[62109]: return func(*args, **kwargs) [ 784.957796] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.957796] env[62109]: raise e [ 784.957796] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.957796] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 784.957796] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.957796] env[62109]: created_port_ids = self._update_ports_for_instance( [ 784.957796] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.957796] env[62109]: with excutils.save_and_reraise_exception(): [ 784.957796] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.957796] env[62109]: self.force_reraise() [ 784.957796] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.957796] env[62109]: raise self.value [ 784.957796] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.957796] env[62109]: updated_port = self._update_port( [ 784.957796] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.957796] env[62109]: _ensure_no_port_binding_failure(port) [ 784.957796] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.957796] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 784.958804] env[62109]: nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. [ 784.958804] env[62109]: Removing descriptor: 16 [ 784.958804] env[62109]: ERROR nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. 
[ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Traceback (most recent call last): [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] yield resources [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.driver.spawn(context, instance, image_meta, [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.958804] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] vm_ref = self.build_virtual_machine(instance, [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] for vif in network_info: [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self._sync_wrapper(fn, *args, **kwargs) [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.wait() [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self[:] = self._gt.wait() [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self._exit_event.wait() [ 784.959202] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 784.959618] env[62109]: ERROR 
nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] result = hub.switch() [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self.greenlet.switch() [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] result = function(*args, **kwargs) [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return func(*args, **kwargs) [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise e [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] nwinfo = self.network_api.allocate_for_instance( [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.959618] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] created_port_ids = self._update_ports_for_instance( [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] with excutils.save_and_reraise_exception(): [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.force_reraise() [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise self.value [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] updated_port = self._update_port( [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.960070] 
env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] _ensure_no_port_binding_failure(port) [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.960070] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise exception.PortBindingFailed(port_id=port['id']) [ 784.960448] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. [ 784.960448] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] [ 784.960448] env[62109]: INFO nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Terminating instance [ 784.961231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.099288] env[62109]: DEBUG nova.network.neutron [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.302733] env[62109]: DEBUG nova.network.neutron [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.303259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] Expecting reply to msg 2cf7d04699f44698a849c9e397cfe9b1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 785.311630] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cf7d04699f44698a849c9e397cfe9b1 [ 785.409854] env[62109]: DEBUG nova.compute.utils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.409854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg d3a3e22c23fb450f802f62ed586bdddc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 785.409854] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 785.409854] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 785.418697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3a3e22c23fb450f802f62ed586bdddc [ 785.477341] env[62109]: DEBUG nova.policy [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '977af06e03fb4180b6c5cfd5ae8aa0cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '003b76634a314c8ba271d8ff8a84f4cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 785.729363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b049b512-8873-40a6-8b81-dccefa2265b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.742272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a5442b-2036-468b-a88e-f7b428be19e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.780896] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0094dda4-c924-4ad9-9c14-504c993178e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.788373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfc1db7-3e4d-4c00-b097-cda849bc3471 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.802699] env[62109]: DEBUG nova.compute.provider_tree [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.803387] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 7d6ac3ba17ed4807b87dcf773ec64982 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 785.806540] env[62109]: DEBUG oslo_concurrency.lockutils [req-98c3f73b-c688-4eed-a5cf-1a077ab62205 req-96fff9f5-0775-4411-8781-603e929efaf9 service nova] Releasing lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.807019] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f 
tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquired lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.807311] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 785.807793] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 6797e123ad39465781b8938624cba2e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 785.811719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d6ac3ba17ed4807b87dcf773ec64982 [ 785.815190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6797e123ad39465781b8938624cba2e1 [ 785.912131] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.914551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg ad4a832aed54463aa7ca9ae2d80fa5bd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 785.967039] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Successfully created port: 9e03f450-e277-4f0a-98c6-050a2f8f2359 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.972011] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad4a832aed54463aa7ca9ae2d80fa5bd [ 786.306951] env[62109]: DEBUG nova.scheduler.client.report [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.309465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg d404a11a576a4c4b9c495690ab78b693 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 786.324451] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d404a11a576a4c4b9c495690ab78b693 [ 786.335825] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.419234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 3aae538d9991484b8465625dadf2e43e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 786.436818] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.437900] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg dec3c93f5fdd49e48f27bdf72cd7bcb7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 786.450655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dec3c93f5fdd49e48f27bdf72cd7bcb7 [ 786.455798] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3aae538d9991484b8465625dadf2e43e [ 786.578107] env[62109]: DEBUG nova.compute.manager [req-86f64aa4-3d6c-43e6-82fa-6c2422c44d1c req-2ac59ce5-1f81-4f0a-8723-0809a007a12e service nova] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Received event network-vif-deleted-ce1aee7f-24df-45b4-acff-b0bbb0657cbe {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 786.817735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.918s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.817735] env[62109]: ERROR nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. 
[ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Traceback (most recent call last): [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.driver.spawn(context, instance, image_meta, [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.817735] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] vm_ref = self.build_virtual_machine(instance, [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] for vif in network_info: [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self._sync_wrapper(fn, *args, **kwargs) [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.wait() [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self[:] = self._gt.wait() [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self._exit_event.wait() [ 786.818201] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] result = hub.switch() [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return self.greenlet.switch() [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] result = function(*args, **kwargs) [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] return func(*args, **kwargs) [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise e [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] nwinfo = self.network_api.allocate_for_instance( [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 786.818598] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] created_port_ids = self._update_ports_for_instance( [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] with excutils.save_and_reraise_exception(): [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] self.force_reraise() [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise self.value [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] updated_port = self._update_port( [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] _ensure_no_port_binding_failure(port) [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 786.818958] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] raise exception.PortBindingFailed(port_id=port['id']) [ 786.819387] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] nova.exception.PortBindingFailed: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. [ 786.819387] env[62109]: ERROR nova.compute.manager [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] [ 786.819387] env[62109]: DEBUG nova.compute.utils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.819387] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.191s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.819797] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 6e03118c30b848258445bad6c0286670 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 786.820977] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Build of instance 87304cf6-e65f-41de-ab6f-d2170aaa9064 was re-scheduled: Binding failed for port 1c6a3483-455d-4776-a8a1-23760212d4ef, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 786.821389] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 786.821621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.821761] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquired lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.821910] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.822270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 5a18a3e749af4e05be3b3c85d049b934 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 786.828649] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a18a3e749af4e05be3b3c85d049b934 [ 786.858283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e03118c30b848258445bad6c0286670 [ 786.922374] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.941436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Releasing lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.941824] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 786.942008] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 786.943019] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72b58f52-b607-4679-93f6-ee24e4f485d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.956078] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37216ca5-279e-41dc-b98f-6daca32fd7b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.966526] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.967468] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.967468] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.967468] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.967468] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.967468] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.967822] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.968048] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.968241] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.968405] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.968878] env[62109]: DEBUG nova.virt.hardware [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.969854] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58bcf01-5e8f-4016-8082-4b8954e34579 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.979157] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d6e43a-fd69-40c7-adb7-0f046c1298b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.988295] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 08638aac-2c6c-4580-9894-6b3b3c1ec484 could not be found. [ 786.988504] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 786.988682] env[62109]: INFO nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Took 0.05 seconds to destroy the instance on the hypervisor. 
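Aside: the nova.virt.hardware DEBUG lines above trace the CPU-topology selection for the m1.nano flavor; with no flavor or image limits, the only candidate for a 1-vCPU guest is sockets=1, cores=1, threads=1. The following is a minimal, hypothetical sketch of that kind of enumeration (it is not the actual nova.virt.hardware code; the helper and its parameters are illustrative only):

    # Illustrative sketch only -- not the real nova.virt.hardware implementation.
    # It mirrors what the DEBUG lines above report: with no limits, a 1-vCPU
    # flavor has exactly one possible topology, 1 socket x 1 core x 1 thread.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every (sockets, cores, threads) split whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # For the 1-vCPU m1.nano flavor in this log there is a single candidate:
    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]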
[ 786.989187] env[62109]: DEBUG oslo.service.loopingcall [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.990221] env[62109]: DEBUG nova.compute.manager [-] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 786.990329] env[62109]: DEBUG nova.network.neutron [-] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.014204] env[62109]: DEBUG nova.network.neutron [-] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.014711] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1beb1512141a484a9d131718978f5c11 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.021276] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1beb1512141a484a9d131718978f5c11 [ 787.157696] env[62109]: ERROR nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. 
[ 787.157696] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.157696] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.157696] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.157696] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.157696] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.157696] env[62109]: ERROR nova.compute.manager raise self.value [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.157696] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 787.157696] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.157696] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 787.158427] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.158427] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 787.158427] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. 
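Aside: every PortBindingFailed traceback in this run bottoms out in the same guard: after updating the port, Nova inspects the binding Neutron returned and raises if the binding failed. A rough, hypothetical sketch of that guard follows, assuming a port dict shaped like a Neutron API response (this is not the actual nova/network/neutron.py source; the names are illustrative):

    # Hypothetical sketch of the guard the tracebacks above keep hitting.
    # Assumes `port` looks like a Neutron port dict; names are illustrative.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports the port binding as failed."""
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # A port whose binding failed, as seen for this instance:
    ensure_no_port_binding_failure(
        {"id": "9e03f450-e277-4f0a-98c6-050a2f8f2359",
         "binding:vif_type": "binding_failed"})   # raises PortBindingFailed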
[ 787.158427] env[62109]: ERROR nova.compute.manager [ 787.158427] env[62109]: Traceback (most recent call last): [ 787.158427] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 787.158427] env[62109]: listener.cb(fileno) [ 787.158427] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.158427] env[62109]: result = function(*args, **kwargs) [ 787.158427] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 787.158427] env[62109]: return func(*args, **kwargs) [ 787.158427] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.158427] env[62109]: raise e [ 787.158427] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.158427] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 787.158427] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.158427] env[62109]: created_port_ids = self._update_ports_for_instance( [ 787.158427] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.158427] env[62109]: with excutils.save_and_reraise_exception(): [ 787.158427] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.158427] env[62109]: self.force_reraise() [ 787.158427] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.158427] env[62109]: raise self.value [ 787.158427] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.158427] env[62109]: updated_port = self._update_port( [ 787.158427] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.158427] env[62109]: _ensure_no_port_binding_failure(port) [ 787.158427] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.158427] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 787.159515] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. [ 787.159515] env[62109]: Removing descriptor: 16 [ 787.159515] env[62109]: ERROR nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. 
[ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Traceback (most recent call last): [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] yield resources [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.driver.spawn(context, instance, image_meta, [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 787.159515] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] vm_ref = self.build_virtual_machine(instance, [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] vif_infos = vmwarevif.get_vif_info(self._session, [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] for vif in network_info: [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self._sync_wrapper(fn, *args, **kwargs) [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.wait() [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self[:] = self._gt.wait() [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self._exit_event.wait() [ 787.159917] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 787.160395] env[62109]: ERROR 
nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] result = hub.switch() [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self.greenlet.switch() [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] result = function(*args, **kwargs) [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return func(*args, **kwargs) [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise e [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] nwinfo = self.network_api.allocate_for_instance( [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.160395] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] created_port_ids = self._update_ports_for_instance( [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] with excutils.save_and_reraise_exception(): [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.force_reraise() [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise self.value [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] updated_port = self._update_port( [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.160838] 
env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] _ensure_no_port_binding_failure(port) [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.160838] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise exception.PortBindingFailed(port_id=port['id']) [ 787.161213] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. [ 787.161213] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] [ 787.161213] env[62109]: INFO nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Terminating instance [ 787.161213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.161213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquired lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.161386] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 787.161768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 554fe7cb12b84177a0488788dabe0c6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.169468] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 554fe7cb12b84177a0488788dabe0c6a [ 787.366888] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.440663] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.441978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 3edcef7f6ae946abbabd6a2a02a4f101 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.448978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3edcef7f6ae946abbabd6a2a02a4f101 [ 787.517042] env[62109]: DEBUG nova.network.neutron [-] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.517203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 738040a514fa4596a734f5e8a42fe26f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.525396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 738040a514fa4596a734f5e8a42fe26f [ 787.665626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b4ce48-0d5c-426f-812f-59ab5f6ab0d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.677009] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e62408d-eb27-4963-9bff-644af51d5015 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.709388] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.711678] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e67615-e8bc-4c17-a559-a9e3373c7366 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.721123] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a094cd1d-9993-4a6d-b283-fcf33fe53acd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.737764] env[62109]: DEBUG nova.compute.provider_tree [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.738532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 15feae8e67354e4791b95bda87083330 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.747593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15feae8e67354e4791b95bda87083330 [ 787.793374] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.793876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg c837526ff48c49668bc8f1e1de124a6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.803215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c837526ff48c49668bc8f1e1de124a6d [ 787.944974] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Releasing lock "refresh_cache-87304cf6-e65f-41de-ab6f-d2170aaa9064" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.945261] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 787.945396] env[62109]: DEBUG nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.945585] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.965173] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.965749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 17730f3ed1914582b2afec7cfe642a33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 787.973799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17730f3ed1914582b2afec7cfe642a33 [ 788.019054] env[62109]: INFO nova.compute.manager [-] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Took 1.03 seconds to deallocate network for instance. 
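Aside: the scheduler report client keeps logging the same inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e (see the "Inventory has not changed" records above and below). As a quick worked example of what that payload implies, placement-style effective capacity is (total - reserved) * allocation_ratio; the small helper below is illustrative only and not part of Nova or Placement:

    # Worked example using the inventory payload logged in this section;
    # the helper itself is illustrative, not Nova/Placement code.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def effective_capacity(inv):
        """(total - reserved) * allocation_ratio for each resource class."""
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}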
[ 788.021386] env[62109]: DEBUG nova.compute.claims [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 788.021563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.241339] env[62109]: DEBUG nova.scheduler.client.report [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.247555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 1b5dbe39f2f5400b98ea4ee67daeae51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.263748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b5dbe39f2f5400b98ea4ee67daeae51 [ 788.295691] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Releasing lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.296216] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 788.297572] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 788.297572] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72001910-7d4d-4f48-90a1-d213cdcdc801 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.314147] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b344f3e9-36dd-4a26-a0b2-5622aec72fa4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.337381] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66a0a424-ecb6-43df-9b47-946ff1e1b7b2 could not be found. [ 788.337607] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 788.337802] env[62109]: INFO nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 788.338045] env[62109]: DEBUG oslo.service.loopingcall [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.338272] env[62109]: DEBUG nova.compute.manager [-] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 788.338365] env[62109]: DEBUG nova.network.neutron [-] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 788.357739] env[62109]: DEBUG nova.network.neutron [-] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 788.358252] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6e224f930ba148fbbe047b7a78b86f60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.364705] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e224f930ba148fbbe047b7a78b86f60 [ 788.467710] env[62109]: DEBUG nova.network.neutron [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.468273] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg a0ea23331c55497b9b941f81ce938f10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.476429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0ea23331c55497b9b941f81ce938f10 [ 788.610400] env[62109]: DEBUG nova.compute.manager [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Received event network-changed-9e03f450-e277-4f0a-98c6-050a2f8f2359 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 788.610588] env[62109]: DEBUG nova.compute.manager [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Refreshing instance network info cache due to event network-changed-9e03f450-e277-4f0a-98c6-050a2f8f2359. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 788.610837] env[62109]: DEBUG oslo_concurrency.lockutils [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] Acquiring lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.610929] env[62109]: DEBUG oslo_concurrency.lockutils [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] Acquired lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.611082] env[62109]: DEBUG nova.network.neutron [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Refreshing network info cache for port 9e03f450-e277-4f0a-98c6-050a2f8f2359 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 788.611552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] Expecting reply to msg 02bda4cc19764ee5ab5fcace026f3570 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.618017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02bda4cc19764ee5ab5fcace026f3570 [ 788.750541] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.932s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.751168] env[62109]: ERROR nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. 
[ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Traceback (most recent call last): [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.driver.spawn(context, instance, image_meta, [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] vm_ref = self.build_virtual_machine(instance, [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.751168] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] for vif in network_info: [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self._sync_wrapper(fn, *args, **kwargs) [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.wait() [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self[:] = self._gt.wait() [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self._exit_event.wait() [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] result = hub.switch() [ 788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
788.751543] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return self.greenlet.switch() [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] result = function(*args, **kwargs) [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] return func(*args, **kwargs) [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise e [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] nwinfo = self.network_api.allocate_for_instance( [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] created_port_ids = self._update_ports_for_instance( [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] with excutils.save_and_reraise_exception(): [ 788.751922] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] self.force_reraise() [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise self.value [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] updated_port = self._update_port( [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] _ensure_no_port_binding_failure(port) [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] raise exception.PortBindingFailed(port_id=port['id']) [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] nova.exception.PortBindingFailed: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. [ 788.752358] env[62109]: ERROR nova.compute.manager [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] [ 788.752710] env[62109]: DEBUG nova.compute.utils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 788.753051] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.069s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.754490] env[62109]: INFO nova.compute.claims [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.756080] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 9167f3b1e2ee46debfb46ca1982cbf6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.757257] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Build of instance 90c50f92-c1ff-4ac9-a819-ae0083884e28 was re-scheduled: Binding failed for port 27d29d58-7775-4338-8153-6267d4a560a3, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 788.757746] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 788.758230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquiring lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.758432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Acquired lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.758594] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 788.758977] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 281217b27ebe4f21829aa5bdf6f55169 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.773462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 281217b27ebe4f21829aa5bdf6f55169 [ 788.794430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9167f3b1e2ee46debfb46ca1982cbf6b [ 788.859964] env[62109]: DEBUG nova.network.neutron [-] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.860453] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30db91dcd0394b9e8f106868c95b11be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 788.868713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30db91dcd0394b9e8f106868c95b11be [ 788.970372] env[62109]: INFO nova.compute.manager [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 87304cf6-e65f-41de-ab6f-d2170aaa9064] Took 1.02 seconds to deallocate network for instance. 
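[editor's note] The PortBindingFailed failures in this log all follow the path visible in the traceback above: allocate_for_instance() updates the port in Neutron, _update_port() reads the result back, and _ensure_no_port_binding_failure() raises PortBindingFailed, which is what drives the claim abort, the network deallocation and the reschedule recorded here. A minimal stand-alone sketch of that last check follows; the local exception class and the 'binding_failed' vif_type value are assumptions for illustration only (the real helpers live in nova/network/neutron.py and nova/exception.py):

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed; wording mirrors the log.
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a port its mechanism drivers could not bind
        # with binding:vif_type == 'binding_failed'; the check keys on that value.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({'id': '27d29d58-7775-4338-8153-6267d4a560a3',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR records above

Because the exception only surfaces once the cached network_info is iterated inside driver.spawn(), the instance is torn down and rescheduled rather than failing at port-creation time, which matches the "was re-scheduled" record above.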
[ 788.972203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 2f4d16beba61407c8dd414913a4e3ce7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.004580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f4d16beba61407c8dd414913a4e3ce7 [ 789.262394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 2327121bd4f14482bf0d2b773bf35e93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.270826] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2327121bd4f14482bf0d2b773bf35e93 [ 789.273880] env[62109]: DEBUG nova.network.neutron [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 789.292663] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 789.335396] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.335930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 510747b2651a4d30a7ef915e667dacf6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.343850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 510747b2651a4d30a7ef915e667dacf6 [ 789.345274] env[62109]: DEBUG nova.network.neutron [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.345721] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] Expecting reply to msg 9f98bc6a69954608baec94c5d5c9824d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.353966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f98bc6a69954608baec94c5d5c9824d [ 789.362325] env[62109]: INFO nova.compute.manager [-] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Took 1.02 seconds to deallocate network for instance. 
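[editor's note] The paired "Expecting reply to msg ... in queue reply_7522b64acfeb4981b1f36928b040d568" / "Received RPC response for msg ..." lines are the AMQP driver correlating each blocking RPC call with its response on the service's reply queue. A rough sketch of issuing such a call through oslo.messaging; the topic, server and method names are placeholders, and it assumes the loaded configuration carries a transport_url pointing at a reachable broker:

    import oslo_messaging as messaging
    from oslo_config import cfg

    # Assumes cfg.CONF has been populated with a valid transport_url; without a
    # running broker the call() below simply blocks until its timeout expires.
    transport = messaging.get_rpc_transport(cfg.CONF)
    target = messaging.Target(topic='compute', server='cmp-host-1')  # placeholder names
    client = messaging.RPCClient(transport, target, timeout=30)

    # A blocking call(): the AMQP driver publishes the request with a msg_id and
    # waits on its per-process reply_<uuid> queue for the response carrying the
    # same msg_id -- the "Expecting reply ... / Received RPC response ..." pairs
    # seen throughout this log.
    result = client.call({}, 'ping', arg='x')  # {} stands in for the request context
    print(result)

Newer oslo.messaging releases also expose a get_rpc_client() helper; the reply-queue mechanics are the same.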
[ 789.364668] env[62109]: DEBUG nova.compute.claims [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 789.364843] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.476779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 9eb2560afd584351b1fdf3377172ba16 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.511092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9eb2560afd584351b1fdf3377172ba16 [ 789.838236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Releasing lock "refresh_cache-90c50f92-c1ff-4ac9-a819-ae0083884e28" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.838469] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 789.838626] env[62109]: DEBUG nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.838783] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 789.847303] env[62109]: DEBUG oslo_concurrency.lockutils [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] Releasing lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.847603] env[62109]: DEBUG nova.compute.manager [req-92abb534-8d43-4903-9136-2735e02acb8f req-02a87273-914d-41ef-bdb6-433e1fdac292 service nova] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Received event network-vif-deleted-9e03f450-e277-4f0a-98c6-050a2f8f2359 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 789.856701] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 789.857247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg b2c504c57da94a37a1880d46f3bcd345 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 789.864036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2c504c57da94a37a1880d46f3bcd345 [ 789.997379] env[62109]: INFO nova.scheduler.client.report [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Deleted allocations for instance 87304cf6-e65f-41de-ab6f-d2170aaa9064 [ 790.005944] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg cc5bef4068fc421d9e6abf494e11460c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.021177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc5bef4068fc421d9e6abf494e11460c [ 790.031705] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abff9b5e-6600-472d-9e00-487335eac612 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.039518] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d09febd-c2b0-4d16-b066-93f6dc996490 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.069777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2081f0b-49a5-4ccb-a203-5c08fa8a47fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.076846] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6e4288-dcfa-4529-8bcc-309ede2e5044 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.089499] env[62109]: DEBUG nova.compute.provider_tree [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.089994] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 56568e75d0244d769b6d38faddc4977a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.096255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56568e75d0244d769b6d38faddc4977a [ 790.359531] env[62109]: DEBUG nova.network.neutron [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.360077] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 3419272ba67b4a35882fcaf9430c7c17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.368202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3419272ba67b4a35882fcaf9430c7c17 [ 790.511729] env[62109]: DEBUG oslo_concurrency.lockutils [None req-91380fdb-bfd1-40aa-a541-58dca23365be tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "87304cf6-e65f-41de-ab6f-d2170aaa9064" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.346s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.512400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 61a5865033bf41a6a3c1b0fb9cc4b9ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.522625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a5865033bf41a6a3c1b0fb9cc4b9ec [ 790.592484] env[62109]: DEBUG nova.scheduler.client.report [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.594852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg aedfa53ea0ce45f986a793b23088fc6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.606464] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aedfa53ea0ce45f986a793b23088fc6d [ 790.862714] env[62109]: INFO nova.compute.manager [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] [instance: 90c50f92-c1ff-4ac9-a819-ae0083884e28] Took 1.02 seconds to deallocate network for instance. 
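[editor's note] The "Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e ..." records above list the full inventory the resource tracker reports to Placement. Capacity available to the scheduler works out to (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation; the snippet below restates that arithmetic against the logged values, purely to make the numbers concrete:

    # Inventory exactly as logged for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 124},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per allocation")
    # VCPU: 192 schedulable, at most 16 per allocation
    # MEMORY_MB: 196078 schedulable, at most 65530 per allocation
    # DISK_GB: 400 schedulable, at most 124 per allocation

The m1.nano flavor used by these tempest instances (1 vCPU, 192 MB, per the flavor record later in the log) sits far below these limits, so the "Claim successful" records are expected even with several builds in flight.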
[ 790.864967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg af400b61c2ea4017962cc7b724aad7b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 790.916790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af400b61c2ea4017962cc7b724aad7b5 [ 791.014075] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 791.015974] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg d21b19a732e84297901d5b5b5e46b163 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.052569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d21b19a732e84297901d5b5b5e46b163 [ 791.097266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.097792] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 791.099438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 191cebdea298429095d0f6f50cf9b150 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.100746] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.544s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.102102] env[62109]: INFO nova.compute.claims [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.103524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 681164fb1e81474ca94f0bfa9724478d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.129133] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 191cebdea298429095d0f6f50cf9b150 [ 791.131901] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 681164fb1e81474ca94f0bfa9724478d [ 791.370618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg 89b0364ffa4546d09ccf77504757f961 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.408567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89b0364ffa4546d09ccf77504757f961 [ 791.532316] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.607183] env[62109]: DEBUG nova.compute.utils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.607830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 7eb9fb33d7264328844753bbe87d769d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.609844] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg fded016095664891a0e4fad2f3f76ae3 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 791.610835] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 791.610998] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 791.617343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fded016095664891a0e4fad2f3f76ae3 [ 791.618588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eb9fb33d7264328844753bbe87d769d [ 791.675546] env[62109]: DEBUG nova.policy [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0c3dc5a397c4f1e9118b36d504166f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9cb6ab6d9d64c6eb3d4d8d37563f6cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 791.896153] env[62109]: INFO nova.scheduler.client.report [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Deleted allocations for instance 90c50f92-c1ff-4ac9-a819-ae0083884e28 [ 791.901851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Expecting reply to msg bccba9f9faf94bbb853594161e0e26e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 791.915565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bccba9f9faf94bbb853594161e0e26e6 [ 792.053192] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Successfully created port: a1df0846-1c39-4992-9a51-d7ac2e394a3c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.111944] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 792.113680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 8ae225bbec004cf692b0ab9a9f84c84a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.144940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ae225bbec004cf692b0ab9a9f84c84a [ 792.390589] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e85248-d089-4dcf-aba8-ce4bfa9a6d4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.398141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a45262-6af5-4d92-a124-cc9c7b8fb3d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.404176] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cbde3920-e9fb-4541-a32b-dc71604200bd tempest-ListImageFiltersTestJSON-1730796357 tempest-ListImageFiltersTestJSON-1730796357-project-member] Lock "90c50f92-c1ff-4ac9-a819-ae0083884e28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.939s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.429466] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 9573451d332e4fce8f1205474b458e4d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.431445] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c9af6d-f6c5-4101-b3ed-ee9be1e34fa7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.438675] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b8d42e-2b60-4cae-828f-524a6716f9c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.442828] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9573451d332e4fce8f1205474b458e4d [ 792.453051] env[62109]: DEBUG nova.compute.provider_tree [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.453551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg fade83bad8104682a55a7e7b26ba1e0f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.464496] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fade83bad8104682a55a7e7b26ba1e0f [ 792.621866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 
tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg f25d58366f394cf189b900f26f61e1de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.656673] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f25d58366f394cf189b900f26f61e1de [ 792.786147] env[62109]: DEBUG nova.compute.manager [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Received event network-changed-a1df0846-1c39-4992-9a51-d7ac2e394a3c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 792.786147] env[62109]: DEBUG nova.compute.manager [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Refreshing instance network info cache due to event network-changed-a1df0846-1c39-4992-9a51-d7ac2e394a3c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 792.786147] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] Acquiring lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.786147] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] Acquired lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.786147] env[62109]: DEBUG nova.network.neutron [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Refreshing network info cache for port a1df0846-1c39-4992-9a51-d7ac2e394a3c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 792.786289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] Expecting reply to msg 3c3ab96660024d5784407ed80cb9646c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.801246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c3ab96660024d5784407ed80cb9646c [ 792.931864] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 792.933723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 26f0d05045c54aa1b408d5683d235dca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.957962] env[62109]: DEBUG nova.scheduler.client.report [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.960519] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 700336cc2ca843f2b9c8d146b335bcd0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 792.973931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 700336cc2ca843f2b9c8d146b335bcd0 [ 792.974950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26f0d05045c54aa1b408d5683d235dca [ 792.997138] env[62109]: ERROR nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. 
[ 792.997138] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 792.997138] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 792.997138] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 792.997138] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 792.997138] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 792.997138] env[62109]: ERROR nova.compute.manager raise self.value [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 792.997138] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 792.997138] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 792.997138] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 792.997477] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 792.997477] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 792.997477] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. 
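[editor's note] Both the formatted dump that ends here and the raw greenthread traceback that follows run through excutils.save_and_reraise_exception(): the __exit__ / force_reraise frames appear because _update_ports_for_instance wraps the failing _update_port() call in that context manager, letting cleanup run before the original PortBindingFailed propagates. A small stand-alone illustration of the oslo.utils pattern (the function name and the simulated failure are invented; only the re-raise mechanics are the point):

    from oslo_utils import excutils

    def update_port_or_rollback(port_id):
        try:
            raise RuntimeError('simulated binding failure for %s' % port_id)
        except RuntimeError:
            with excutils.save_and_reraise_exception():
                # Rollback/cleanup runs here; when the block exits without a new
                # exception, the saved one is re-raised (force_reraise -> raise self.value).
                print('rolling back port', port_id)

    try:
        update_port_or_rollback('a1df0846-1c39-4992-9a51-d7ac2e394a3c')
    except RuntimeError as exc:
        print('re-raised:', exc)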
[ 792.997477] env[62109]: ERROR nova.compute.manager [ 792.997477] env[62109]: Traceback (most recent call last): [ 792.997477] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 792.997477] env[62109]: listener.cb(fileno) [ 792.997477] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 792.997477] env[62109]: result = function(*args, **kwargs) [ 792.997477] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 792.997477] env[62109]: return func(*args, **kwargs) [ 792.997477] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 792.997477] env[62109]: raise e [ 792.997477] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 792.997477] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 792.997477] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 792.997477] env[62109]: created_port_ids = self._update_ports_for_instance( [ 792.997477] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 792.997477] env[62109]: with excutils.save_and_reraise_exception(): [ 792.997477] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 792.997477] env[62109]: self.force_reraise() [ 792.997477] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 792.997477] env[62109]: raise self.value [ 792.997477] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 792.997477] env[62109]: updated_port = self._update_port( [ 792.997477] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 792.997477] env[62109]: _ensure_no_port_binding_failure(port) [ 792.997477] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 792.997477] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 792.998129] env[62109]: nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. [ 792.998129] env[62109]: Removing descriptor: 16 [ 793.124854] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 793.153077] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 793.153325] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 793.153471] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.153644] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 793.153785] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.153925] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 793.154125] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 793.154279] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
793.154435] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 793.154589] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 793.154750] env[62109]: DEBUG nova.virt.hardware [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 793.155680] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6d0591-77f8-4a67-a509-72230f91d7e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.164167] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bc84b7-cc8d-41d2-8c45-40837b4a5728 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.178662] env[62109]: ERROR nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. 
[ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Traceback (most recent call last): [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] yield resources [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.driver.spawn(context, instance, image_meta, [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] vm_ref = self.build_virtual_machine(instance, [ 793.178662] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] for vif in network_info: [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return self._sync_wrapper(fn, *args, **kwargs) [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.wait() [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self[:] = self._gt.wait() [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return self._exit_event.wait() [ 793.179254] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 793.179254] env[62109]: ERROR 
nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] current.throw(*self._exc) [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] result = function(*args, **kwargs) [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return func(*args, **kwargs) [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise e [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] nwinfo = self.network_api.allocate_for_instance( [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] created_port_ids = self._update_ports_for_instance( [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] with excutils.save_and_reraise_exception(): [ 793.179795] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.force_reraise() [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise self.value [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] updated_port = self._update_port( [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] _ensure_no_port_binding_failure(port) [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise exception.PortBindingFailed(port_id=port['id']) [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. [ 793.180397] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] [ 793.180397] env[62109]: INFO nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Terminating instance [ 793.181305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.310605] env[62109]: DEBUG nova.network.neutron [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.399959] env[62109]: DEBUG nova.network.neutron [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.400743] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] Expecting reply to msg 5ab4c58a2a4a41cfb7c32bfdf706893e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 793.409413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ab4c58a2a4a41cfb7c32bfdf706893e [ 793.451822] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.462702] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.463186] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 793.465813] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 4fbfa5b95e7a475eb0599d3c61cb1bd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 793.466803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.176s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.469666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 96239601d9174c3284938e19d4104728 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 793.500523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fbfa5b95e7a475eb0599d3c61cb1bd3 [ 793.507220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96239601d9174c3284938e19d4104728 [ 793.905086] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a0fc1ef-282a-4c14-9827-a0abebaf071b req-3162ae1d-38af-4411-9a41-f2b2f68a6feb service nova] Releasing lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.905086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.905086] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 793.905086] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg f69a7b36ed204933821f83c2cefd22a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 793.912207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f69a7b36ed204933821f83c2cefd22a4 [ 793.976021] env[62109]: DEBUG nova.compute.utils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 793.976872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 
01a693331e8d438ebd382b685333451c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 793.978428] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 793.978712] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 793.989520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01a693331e8d438ebd382b685333451c [ 794.028802] env[62109]: DEBUG nova.policy [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0c3dc5a397c4f1e9118b36d504166f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9cb6ab6d9d64c6eb3d4d8d37563f6cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 794.297594] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69061fe4-e88b-49cc-910e-3e8aae50fa57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.302761] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0f5a9c-9479-49f8-b876-b71887a1b515 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.345862] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e30a4c-de2c-466d-a4b3-3f6bed27b5b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.359032] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8866610-889d-49d4-aef4-e5fd4942b99a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.363681] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Successfully created port: f971e1a0-a370-44fc-acd0-66f2e04e3b60 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.374757] env[62109]: DEBUG nova.compute.provider_tree [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.375357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 53ebddfd18c3442994f2d3b24ed4a87f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 794.384699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53ebddfd18c3442994f2d3b24ed4a87f [ 794.431594] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 794.482195] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 794.483854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 1a89f90a080c4e74b61ee627c2a20c2c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 794.518033] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.518566] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg cab5544c2609499f93a1e1368449f258 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 794.526675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cab5544c2609499f93a1e1368449f258 [ 794.530217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a89f90a080c4e74b61ee627c2a20c2c [ 794.865009] env[62109]: DEBUG nova.compute.manager [req-d7e0df92-07c3-4e07-b1a5-bd2a06434378 req-463d9852-c8ac-4741-92d7-48cfb931bb6e service nova] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Received event network-vif-deleted-a1df0846-1c39-4992-9a51-d7ac2e394a3c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 794.879076] env[62109]: DEBUG nova.scheduler.client.report [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.881448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 2869c92ee95d407188d298d97d991b33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 794.893102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2869c92ee95d407188d298d97d991b33 [ 794.989269] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg f565e4356b6847dcb325c469748240d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.021290] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.021402] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 795.021505] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 795.021786] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b451634-648f-4127-9f47-be5e1de155e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.025397] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f565e4356b6847dcb325c469748240d7 [ 795.031182] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d75bd0f-0d60-4beb-9d3a-0bb389b75eaf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.053099] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 could not be found. 
[ 795.053327] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 795.053503] env[62109]: INFO nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Took 0.03 seconds to destroy the instance on the hypervisor. [ 795.053745] env[62109]: DEBUG oslo.service.loopingcall [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.053958] env[62109]: DEBUG nova.compute.manager [-] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 795.054051] env[62109]: DEBUG nova.network.neutron [-] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 795.088873] env[62109]: DEBUG nova.network.neutron [-] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.089475] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 50239007da6245a5ad25e105b54f29d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.096359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50239007da6245a5ad25e105b54f29d2 [ 795.284654] env[62109]: ERROR nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. 
[ 795.284654] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 795.284654] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 795.284654] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 795.284654] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.284654] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.284654] env[62109]: ERROR nova.compute.manager raise self.value [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 795.284654] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 795.284654] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.284654] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 795.285043] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.285043] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 795.285043] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. 
[ 795.285043] env[62109]: ERROR nova.compute.manager [ 795.285043] env[62109]: Traceback (most recent call last): [ 795.285043] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 795.285043] env[62109]: listener.cb(fileno) [ 795.285043] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 795.285043] env[62109]: result = function(*args, **kwargs) [ 795.285043] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 795.285043] env[62109]: return func(*args, **kwargs) [ 795.285043] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 795.285043] env[62109]: raise e [ 795.285043] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 795.285043] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 795.285043] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 795.285043] env[62109]: created_port_ids = self._update_ports_for_instance( [ 795.285043] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 795.285043] env[62109]: with excutils.save_and_reraise_exception(): [ 795.285043] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.285043] env[62109]: self.force_reraise() [ 795.285043] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.285043] env[62109]: raise self.value [ 795.285043] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 795.285043] env[62109]: updated_port = self._update_port( [ 795.285043] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.285043] env[62109]: _ensure_no_port_binding_failure(port) [ 795.285043] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.285043] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 795.285720] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. [ 795.285720] env[62109]: Removing descriptor: 16 [ 795.400305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.400305] env[62109]: ERROR nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. 
[ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Traceback (most recent call last): [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.driver.spawn(context, instance, image_meta, [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 795.400305] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] vm_ref = self.build_virtual_machine(instance, [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] vif_infos = vmwarevif.get_vif_info(self._session, [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] for vif in network_info: [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return self._sync_wrapper(fn, *args, **kwargs) [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.wait() [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self[:] = self._gt.wait() [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return self._exit_event.wait() [ 795.400755] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] current.throw(*self._exc) [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] result = function(*args, **kwargs) [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] return func(*args, **kwargs) [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise e [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] nwinfo = self.network_api.allocate_for_instance( [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] created_port_ids = self._update_ports_for_instance( [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 795.401020] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] with excutils.save_and_reraise_exception(): [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] self.force_reraise() [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise self.value [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] updated_port = self._update_port( [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] _ensure_no_port_binding_failure(port) [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] raise exception.PortBindingFailed(port_id=port['id']) [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] nova.exception.PortBindingFailed: Binding failed for 
port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. [ 795.401296] env[62109]: ERROR nova.compute.manager [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] [ 795.401559] env[62109]: DEBUG nova.compute.utils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 795.401559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.862s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.401559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.401559] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 795.401559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.948s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.401686] env[62109]: INFO nova.compute.claims [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.401686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 0af8de05f77f4adfb00d36d7a3a77cf6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.403230] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Build of instance ae026dca-dc05-4710-8a03-4e792a0dc61d was re-scheduled: Binding failed for port b2910613-b431-4ef8-9ab4-20a3b9638084, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 795.403843] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 795.404183] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquiring lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.404951] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Acquired lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.405256] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 795.405763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 17cd0e1d17034399bde68fc818ffeaab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.407723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ec3b78-a65e-4a26-8486-c9c88a0ddc4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.422792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17cd0e1d17034399bde68fc818ffeaab [ 795.424310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b5b35f-50b5-4114-8d06-38c85cbc2f05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.443565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0af8de05f77f4adfb00d36d7a3a77cf6 [ 795.447215] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601e805f-924d-4aa1-817d-497f3f9ab237 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.454588] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d4218a-77d1-4fba-98a4-1aaf8fd7ed5a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.483598] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181687MB free_disk=124GB 
free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 795.483747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.501524] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 795.527440] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=<?>,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-21T07:11:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 795.527708] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 795.527863] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.528083] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 795.528245] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.528390] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 795.528610] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 795.528766] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 795.528947] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 795.529111] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 795.529277] env[62109]: DEBUG nova.virt.hardware [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.530194] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14da36d2-cccb-4004-bbbb-54a631d81c3b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.538397] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81689f5-9115-466d-9d27-9433ee7cecd8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.552139] env[62109]: ERROR nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. 
[ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Traceback (most recent call last): [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] yield resources [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.driver.spawn(context, instance, image_meta, [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] vm_ref = self.build_virtual_machine(instance, [ 795.552139] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] vif_infos = vmwarevif.get_vif_info(self._session, [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] for vif in network_info: [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return self._sync_wrapper(fn, *args, **kwargs) [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.wait() [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self[:] = self._gt.wait() [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return self._exit_event.wait() [ 795.552410] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 795.552410] env[62109]: ERROR 
nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] current.throw(*self._exc) [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] result = function(*args, **kwargs) [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return func(*args, **kwargs) [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise e [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] nwinfo = self.network_api.allocate_for_instance( [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] created_port_ids = self._update_ports_for_instance( [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] with excutils.save_and_reraise_exception(): [ 795.552772] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.force_reraise() [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise self.value [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] updated_port = self._update_port( [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] _ensure_no_port_binding_failure(port) [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise exception.PortBindingFailed(port_id=port['id']) [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. [ 795.553038] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] [ 795.553038] env[62109]: INFO nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Terminating instance [ 795.554426] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.554580] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.554739] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 795.556245] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 48c4027d164340cc946f0a503b3a8efb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.563151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48c4027d164340cc946f0a503b3a8efb [ 795.591103] env[62109]: DEBUG nova.network.neutron [-] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.591550] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7a239e392d1f4e50a0f8a83166de6dde in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.599956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a239e392d1f4e50a0f8a83166de6dde [ 795.915056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 0beba4b2457b4a079e35f98fd3168a48 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 795.922848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0beba4b2457b4a079e35f98fd3168a48 [ 795.926617] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 
tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.000522] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.001069] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg c0fe5fcd7778483a9135112871d3f733 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.008839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0fe5fcd7778483a9135112871d3f733 [ 796.079519] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.093640] env[62109]: INFO nova.compute.manager [-] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Took 1.04 seconds to deallocate network for instance. [ 796.095715] env[62109]: DEBUG nova.compute.claims [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 796.096027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.145255] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.145904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg cd9cac166f234b18b83479f2ed0020a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.154765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd9cac166f234b18b83479f2ed0020a4 [ 796.503107] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Releasing lock "refresh_cache-ae026dca-dc05-4710-8a03-4e792a0dc61d" 
{{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.503372] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 796.503522] env[62109]: DEBUG nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 796.503687] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.520089] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.520465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 16836582e66f43018d4a682f72877ec0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.530074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16836582e66f43018d4a682f72877ec0 [ 796.647728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.648889] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 796.648889] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 796.648889] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b94d2175-d058-4f26-9f9c-3845d4f2664c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.659604] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbe580f-88b8-4f40-9ef5-798bcf7b712d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.685666] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 could not be found. [ 796.685988] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 796.686230] env[62109]: INFO nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Took 0.04 seconds to destroy the instance on the hypervisor. [ 796.686505] env[62109]: DEBUG oslo.service.loopingcall [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.686792] env[62109]: DEBUG nova.compute.manager [-] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 796.686916] env[62109]: DEBUG nova.network.neutron [-] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.694073] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfff7a9-5810-4d9c-8984-0e0482a8baf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.700460] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce02f00-3cc2-4c2e-9c0d-b72d9b824a1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.706259] env[62109]: DEBUG nova.network.neutron [-] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.706731] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 57be2b811e89432a887fc91d5962de4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.730960] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57be2b811e89432a887fc91d5962de4a [ 796.731650] env[62109]: DEBUG nova.network.neutron [-] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.732170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f696292294164eb796c9f3892f9f13bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.733566] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84051aaa-9c2b-48a8-8e04-bdddc2595dba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.740374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f696292294164eb796c9f3892f9f13bf [ 796.741648] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdc7664-378b-4bb9-b234-cfb736b4def1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.755696] env[62109]: DEBUG nova.compute.provider_tree [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.756189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 0f0e18c6e2064a5092d0a121f34f057b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.762139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
0f0e18c6e2064a5092d0a121f34f057b [ 796.888713] env[62109]: DEBUG nova.compute.manager [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Received event network-changed-f971e1a0-a370-44fc-acd0-66f2e04e3b60 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 796.888929] env[62109]: DEBUG nova.compute.manager [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Refreshing instance network info cache due to event network-changed-f971e1a0-a370-44fc-acd0-66f2e04e3b60. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 796.889161] env[62109]: DEBUG oslo_concurrency.lockutils [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] Acquiring lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.889308] env[62109]: DEBUG oslo_concurrency.lockutils [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] Acquired lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.889464] env[62109]: DEBUG nova.network.neutron [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Refreshing network info cache for port f971e1a0-a370-44fc-acd0-66f2e04e3b60 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 796.889871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] Expecting reply to msg 04e630d6c9bc4653bfaa12025769629d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 796.896674] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04e630d6c9bc4653bfaa12025769629d [ 797.025023] env[62109]: DEBUG nova.network.neutron [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.025598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg a3628f58c3dc4e259d872e06cb195526 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.033584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3628f58c3dc4e259d872e06cb195526 [ 797.237173] env[62109]: INFO nova.compute.manager [-] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Took 0.55 seconds to deallocate network for instance. 
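The teardown of instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 above stems from the PortBindingFailed raised for port f971e1a0-a370-44fc-acd0-66f2e04e3b60: per the tracebacks in this log, the guard _ensure_no_port_binding_failure (nova/network/neutron.py, line 294) raises once Neutron reports the binding as failed. A minimal sketch of that pattern follows, assuming the check keys off the port's binding:vif_type field; the exception class and sample port are simplified stand-ins, not Nova's real objects:

    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a port whose binding failed by setting
        # binding:vif_type to 'binding_failed'; the guard then raises.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # A port left in the failed-binding state, as in the log above.
    port = {'id': 'f971e1a0-a370-44fc-acd0-66f2e04e3b60',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message format as in the traceback above

The compute manager then aborts the claim and deallocates networking, which is the sequence logged above.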
[ 797.239719] env[62109]: DEBUG nova.compute.claims [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 797.239908] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.258944] env[62109]: DEBUG nova.scheduler.client.report [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 797.261322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 1a71561d02b744dabdad86f417032304 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.273166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a71561d02b744dabdad86f417032304 [ 797.405058] env[62109]: DEBUG nova.network.neutron [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.474176] env[62109]: DEBUG nova.network.neutron [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.474704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] Expecting reply to msg 0f568cc6a6884ad59c3d113df2180175 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.482085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f568cc6a6884ad59c3d113df2180175 [ 797.528504] env[62109]: INFO nova.compute.manager [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] [instance: ae026dca-dc05-4710-8a03-4e792a0dc61d] Took 1.02 seconds to deallocate network for instance. 
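For reference, the inventory payload reported above for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e can be turned into effective schedulable capacity per resource class, assuming the usual Placement formula (total - reserved) * allocation_ratio; the helper below is only an illustration, not a Nova or Placement API:

    # Inventory exactly as reported in the log above (only the fields needed here).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # (total - reserved) * allocation_ratio, truncated to whole units.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

With allocation_ratio 4.0 the 48 VCPUs are overcommitted to 192 schedulable units, while memory and disk carry no overcommit; max_unit (16 VCPU, 65530 MB, 124 GB) additionally caps what any single allocation may request.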
[ 797.530419] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg abf988c15b8f46f4adf00310894d1dcd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.561132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abf988c15b8f46f4adf00310894d1dcd [ 797.764343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.764643] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 797.766497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg cf2028dab0e64c1e85417ed3dab85a1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.767898] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.481s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.771400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg c9223f350df74da88203e30469df3377 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 797.796638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf2028dab0e64c1e85417ed3dab85a1b [ 797.802033] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9223f350df74da88203e30469df3377 [ 797.977003] env[62109]: DEBUG oslo_concurrency.lockutils [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] Releasing lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.977003] env[62109]: DEBUG nova.compute.manager [req-49088bb4-cc61-4022-947d-8a8e75758601 req-9a38fb66-0466-449a-8dc6-76067c3ec61f service nova] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Received event network-vif-deleted-f971e1a0-a370-44fc-acd0-66f2e04e3b60 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 798.035794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 
4f5a72d12b5b4276bc61f3fe29dafefa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 798.068220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5a72d12b5b4276bc61f3fe29dafefa [ 798.274310] env[62109]: DEBUG nova.compute.utils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.274954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg a3ff095a6a274932aae6fea3772aa432 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 798.280029] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 798.280029] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.286342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3ff095a6a274932aae6fea3772aa432 [ 798.316651] env[62109]: DEBUG nova.policy [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0c3dc5a397c4f1e9118b36d504166f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9cb6ab6d9d64c6eb3d4d8d37563f6cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 798.559783] env[62109]: INFO nova.scheduler.client.report [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Deleted allocations for instance ae026dca-dc05-4710-8a03-4e792a0dc61d [ 798.565828] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Expecting reply to msg 4b47508529d54f95a2ced59254d04b51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 798.579831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b47508529d54f95a2ced59254d04b51 [ 798.608055] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Successfully created port: e8178202-8899-4251-9c65-7ab8b9217948 
{{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.638779] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b049d4-4ef1-4849-ae5e-d34f72500d2a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.646221] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4a56ea-f59f-4104-90ed-5b16f9a19999 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.683165] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8676a970-5168-4725-97de-d8a644bbf8cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.690236] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f236c34d-ef34-4d12-afe8-979fd08e172a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.702820] env[62109]: DEBUG nova.compute.provider_tree [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.703373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg e7cce450cfa64cd7822691c66838c608 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 798.711529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7cce450cfa64cd7822691c66838c608 [ 798.780215] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 798.782037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 88ec99a374134b2ab3bf4ce23927733c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 798.816670] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88ec99a374134b2ab3bf4ce23927733c [ 799.067841] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ba02dd96-7ea9-4b4a-904a-ab5cbe3d08b1 tempest-AttachInterfacesUnderV243Test-451709166 tempest-AttachInterfacesUnderV243Test-451709166-project-member] Lock "ae026dca-dc05-4710-8a03-4e792a0dc61d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.490s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.068148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 855e4d8e7a5a43b69de0393ef9730e5b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.077432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 855e4d8e7a5a43b69de0393ef9730e5b [ 799.206287] env[62109]: DEBUG nova.scheduler.client.report [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.208909] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg c6512fe2f26f4bd18156950c9a3de6f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.220027] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6512fe2f26f4bd18156950c9a3de6f7 [ 799.286559] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 040183ecc78c4d02ab8a4825c6ad7601 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.318163] env[62109]: DEBUG nova.compute.manager [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Received event network-changed-e8178202-8899-4251-9c65-7ab8b9217948 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 799.318357] env[62109]: DEBUG nova.compute.manager [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Refreshing instance 
network info cache due to event network-changed-e8178202-8899-4251-9c65-7ab8b9217948. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 799.318563] env[62109]: DEBUG oslo_concurrency.lockutils [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] Acquiring lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.318694] env[62109]: DEBUG oslo_concurrency.lockutils [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] Acquired lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.318845] env[62109]: DEBUG nova.network.neutron [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Refreshing network info cache for port e8178202-8899-4251-9c65-7ab8b9217948 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 799.319318] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] Expecting reply to msg 0343492952424abbbe8b8814031821a3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.325790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0343492952424abbbe8b8814031821a3 [ 799.330879] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 040183ecc78c4d02ab8a4825c6ad7601 [ 799.523387] env[62109]: ERROR nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. 
[ 799.523387] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.523387] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 799.523387] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 799.523387] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.523387] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.523387] env[62109]: ERROR nova.compute.manager raise self.value [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 799.523387] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 799.523387] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.523387] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 799.523808] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.523808] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 799.523808] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. 
[ 799.523808] env[62109]: ERROR nova.compute.manager [ 799.523808] env[62109]: Traceback (most recent call last): [ 799.523808] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 799.523808] env[62109]: listener.cb(fileno) [ 799.523808] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.523808] env[62109]: result = function(*args, **kwargs) [ 799.523808] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 799.523808] env[62109]: return func(*args, **kwargs) [ 799.523808] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 799.523808] env[62109]: raise e [ 799.523808] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.523808] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 799.523808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 799.523808] env[62109]: created_port_ids = self._update_ports_for_instance( [ 799.523808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 799.523808] env[62109]: with excutils.save_and_reraise_exception(): [ 799.523808] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.523808] env[62109]: self.force_reraise() [ 799.523808] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.523808] env[62109]: raise self.value [ 799.523808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 799.523808] env[62109]: updated_port = self._update_port( [ 799.523808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.523808] env[62109]: _ensure_no_port_binding_failure(port) [ 799.523808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.523808] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 799.524468] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. [ 799.524468] env[62109]: Removing descriptor: 16 [ 799.570454] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 799.572351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 6bf5b1fb845c4e40bd5856fb9a466b90 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.603311] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bf5b1fb845c4e40bd5856fb9a466b90 [ 799.713531] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.714777] env[62109]: ERROR nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Traceback (most recent call last): [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.driver.spawn(context, instance, image_meta, [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] vm_ref = self.build_virtual_machine(instance, [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] vif_infos = vmwarevif.get_vif_info(self._session, [ 799.714777] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] for vif in network_info: [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return self._sync_wrapper(fn, *args, **kwargs) [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 
16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.wait() [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self[:] = self._gt.wait() [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return self._exit_event.wait() [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] current.throw(*self._exc) [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.715076] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] result = function(*args, **kwargs) [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] return func(*args, **kwargs) [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise e [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] nwinfo = self.network_api.allocate_for_instance( [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] created_port_ids = self._update_ports_for_instance( [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] with excutils.save_and_reraise_exception(): [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] self.force_reraise() [ 799.715374] env[62109]: ERROR nova.compute.manager [instance: 
16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise self.value [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] updated_port = self._update_port( [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] _ensure_no_port_binding_failure(port) [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] raise exception.PortBindingFailed(port_id=port['id']) [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] nova.exception.PortBindingFailed: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. [ 799.715667] env[62109]: ERROR nova.compute.manager [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] [ 799.715667] env[62109]: DEBUG nova.compute.utils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 799.716874] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.957s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.718408] env[62109]: INFO nova.compute.claims [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.728847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg cccb04b9365b47689f1f2aac1ab50f12 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.728847] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Build of instance 16b04a1b-0ab3-4386-a1eb-74ef3e46a553 was re-scheduled: Binding failed for port f735d74f-e61d-4468-b208-318e406dcc17, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 799.728847] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 799.728847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquiring lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.728847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Acquired lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.729175] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.729175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 585b178a9b3948dca5dc38647d978a97 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 799.732835] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 585b178a9b3948dca5dc38647d978a97 [ 799.781891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cccb04b9365b47689f1f2aac1ab50f12 [ 799.789918] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 799.815435] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.815737] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.815923] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.816125] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.816272] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.816417] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.816624] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.816778] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
799.816943] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.817105] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.817273] env[62109]: DEBUG nova.virt.hardware [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.818421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816e4fca-454e-4f02-b7d1-d363f443688b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.828589] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bf0fea-4a29-4dd8-b91f-496b2df64ea4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.845815] env[62109]: ERROR nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. 
[ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Traceback (most recent call last): [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] yield resources [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.driver.spawn(context, instance, image_meta, [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] vm_ref = self.build_virtual_machine(instance, [ 799.845815] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] for vif in network_info: [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return self._sync_wrapper(fn, *args, **kwargs) [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.wait() [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self[:] = self._gt.wait() [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return self._exit_event.wait() [ 799.846203] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 799.846203] env[62109]: ERROR 
nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] current.throw(*self._exc) [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] result = function(*args, **kwargs) [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return func(*args, **kwargs) [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise e [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] nwinfo = self.network_api.allocate_for_instance( [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] created_port_ids = self._update_ports_for_instance( [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] with excutils.save_and_reraise_exception(): [ 799.846530] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.force_reraise() [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise self.value [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] updated_port = self._update_port( [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] _ensure_no_port_binding_failure(port) [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise exception.PortBindingFailed(port_id=port['id']) [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. [ 799.846857] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] [ 799.846857] env[62109]: INFO nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Terminating instance [ 799.848281] env[62109]: DEBUG nova.network.neutron [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.850643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.990572] env[62109]: DEBUG nova.network.neutron [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.991190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] Expecting reply to msg ab6a17c64067460d8c5fc57ad82f0618 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 800.000747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab6a17c64067460d8c5fc57ad82f0618 [ 800.092413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.229172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 3f47695024b74ff995ae42b729735f3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 800.236532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f47695024b74ff995ae42b729735f3f [ 800.256218] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 800.312914] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.313427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg df2b7442b6734f85828aea784803c02c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 800.321751] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df2b7442b6734f85828aea784803c02c [ 800.493501] env[62109]: DEBUG oslo_concurrency.lockutils [req-b03cc10f-f524-4dad-a36e-0ff200e79fb6 req-62022516-fa1a-4d50-a539-8c621991e4d3 service nova] Releasing lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.494140] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.494474] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 800.495088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg cd20d46275d34fe391dba3da2578c179 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 800.502174] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd20d46275d34fe391dba3da2578c179 [ 800.753276] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Refreshing inventories for resource provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 800.767853] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Updating ProviderTree inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:782}} [ 800.767853] env[62109]: DEBUG nova.compute.provider_tree [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Updating inventory in ProviderTree for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.786230] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Refreshing aggregate associations for resource provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e, aggregates: None {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 800.804249] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Refreshing trait associations for resource provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62109) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 800.816189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Releasing lock "refresh_cache-16b04a1b-0ab3-4386-a1eb-74ef3e46a553" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.816343] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 800.816936] env[62109]: DEBUG nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 800.816936] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 800.840715] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 800.841446] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 20e5c76fc56347609242f2436c73e155 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 800.848848] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20e5c76fc56347609242f2436c73e155 [ 801.022649] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.141894] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.142413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg b2484686b6a540cf96b4d061bce4c0b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.151077] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2484686b6a540cf96b4d061bce4c0b8 [ 801.270022] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b79277-47a1-4d83-b896-24822229c7c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.277759] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e80dec-29c8-4826-a1de-4de3670af564 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.311045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5ae9a2-164d-4d7a-9b47-f6f0e1789c7f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.318293] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcac489-bb29-4767-a78f-a0d091f126f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.333060] env[62109]: DEBUG nova.compute.provider_tree [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.333591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg b81a967b22d140b380f2cdf2be96e19f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.343063] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b81a967b22d140b380f2cdf2be96e19f [ 801.343632] env[62109]: DEBUG nova.network.neutron [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.344283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 21657c2e1bf3492297e334c782881893 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.351623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21657c2e1bf3492297e334c782881893 [ 801.354822] env[62109]: DEBUG nova.compute.manager [req-a597ee19-8306-4378-8ded-9da5d4e8af0a req-0762f0cc-2092-42c8-99c1-69bd7aee2fef service nova] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Received event network-vif-deleted-e8178202-8899-4251-9c65-7ab8b9217948 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 801.644299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.644729] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 801.644920] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 801.645218] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0752dde1-0e1b-4b08-b32c-78932729d252 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.654922] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5536fc-de95-4e27-a0f1-56661aa1c67c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.676842] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6ec5486-0843-4c38-b187-35d5296965a7 could not be found. 
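The WARNING above and the "Instance destroyed" / "Took 0.03 seconds" entries that follow show the destroy path tolerating a VM that never materialised on the backend. An editor's illustration of that shape (not Nova source; the lookup function stands in for the SearchIndex.FindAllByUuid call seen in the log):

    class InstanceNotFound(Exception):
        pass


    def lookup_vm(instance_uuid):
        # The spawn failed before a VM was registered, so nothing is found.
        raise InstanceNotFound(instance_uuid)


    def destroy(instance_uuid):
        try:
            vm_ref = lookup_vm(instance_uuid)
            # ... power off and unregister vm_ref here ...
        except InstanceNotFound:
            # Treat "not there" as already destroyed so network deallocation
            # and claim cleanup still run.
            print("Instance does not exist on backend: %s" % instance_uuid)


    destroy("a6ec5486-0843-4c38-b187-35d5296965a7")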
[ 801.677134] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 801.677325] env[62109]: INFO nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Took 0.03 seconds to destroy the instance on the hypervisor. [ 801.677570] env[62109]: DEBUG oslo.service.loopingcall [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.677792] env[62109]: DEBUG nova.compute.manager [-] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 801.677884] env[62109]: DEBUG nova.network.neutron [-] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 801.707294] env[62109]: DEBUG nova.network.neutron [-] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.708130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 53d5eb0165084bb98468f83916e22d26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.716458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53d5eb0165084bb98468f83916e22d26 [ 801.836473] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.839281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg bda4bf0a27cb4c129752e685b5a5c853 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.846993] env[62109]: INFO nova.compute.manager [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] [instance: 16b04a1b-0ab3-4386-a1eb-74ef3e46a553] Took 1.03 seconds to deallocate network for instance. 
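The inventory dictionaries repeated in these scheduler report-client entries determine the schedulable capacity of provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. As a worked example using the numbers from the log (the formula (total - reserved) * allocation_ratio is standard Placement arithmetic, noted by the editor, not stated in the log itself):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(resource_class, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0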
[ 801.847764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 06454329c52141ada062901af7ec1577 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 801.850618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bda4bf0a27cb4c129752e685b5a5c853 [ 801.882614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06454329c52141ada062901af7ec1577 [ 802.210446] env[62109]: DEBUG nova.network.neutron [-] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.210928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2b628eb236504fdba48e90ae4310ef0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.219942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b628eb236504fdba48e90ae4310ef0e [ 802.342176] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.342176] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.344495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg b6ce73abb8ed451ea7343eeb9401405a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.345091] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.323s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.347382] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg a26ccee79fe743babefb33e9d6f8233a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.351815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 2481d97d216a451886a59553bf41412a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.383618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6ce73abb8ed451ea7343eeb9401405a [ 802.388208] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a26ccee79fe743babefb33e9d6f8233a [ 802.397647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2481d97d216a451886a59553bf41412a [ 802.713444] env[62109]: INFO nova.compute.manager [-] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Took 1.04 seconds to deallocate network for instance. 
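The recurring 'Acquiring lock "compute_resources" ...' / 'acquired ... waited' / '"released" ... held' DEBUG triplets in these entries are emitted from inside oslo.concurrency's synchronized helper (the inner wrapper in lockutils.py referenced by the log). A minimal sketch of the pattern that produces them; the function body is illustrative only, though in Nova the resource tracker does wrap instance_claim and abort_instance_claim this way:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"


    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim():
        # Accounting happens while the lock is held; a concurrent
        # abort_instance_claim queues here (hence "waited 14.323s" above).
        pass


    instance_claim()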
[ 802.715757] env[62109]: DEBUG nova.compute.claims [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 802.715950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.850702] env[62109]: DEBUG nova.compute.utils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.851357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 1414dc39e9d54a688435a59f0e573822 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.855844] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.855844] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 802.861003] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1414dc39e9d54a688435a59f0e573822 [ 802.879570] env[62109]: INFO nova.scheduler.client.report [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Deleted allocations for instance 16b04a1b-0ab3-4386-a1eb-74ef3e46a553 [ 802.884933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Expecting reply to msg 96faa4465cd7498882001a9de910bd7a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 802.898727] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96faa4465cd7498882001a9de910bd7a [ 803.089507] env[62109]: DEBUG nova.policy [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6d97155b2a64df88fa3c3885c771c64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a8aeaac48ad42878d34975953c280fc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 803.117896] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a01fcd-1324-4a0f-ab71-434daecf7cae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.126897] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b07003e-2f3d-437c-a09e-dfbb670c1254 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.160923] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3281cec-10eb-4694-8598-78fc22024120 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.169879] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446707ea-3ccd-4cc1-95ef-04304e090b58 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.183037] env[62109]: DEBUG nova.compute.provider_tree [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.183547] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg aa81606951cb4026b17bfb0323424116 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.191257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa81606951cb4026b17bfb0323424116 [ 803.355750] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.357757] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg b4b798f35a57416c85e37a984e0753e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.388506] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4b798f35a57416c85e37a984e0753e7 [ 803.389215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a726516d-0952-438f-baf7-b9ad271fcf78 tempest-MigrationsAdminTest-670983471 tempest-MigrationsAdminTest-670983471-project-member] Lock "16b04a1b-0ab3-4386-a1eb-74ef3e46a553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 162.599s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.389831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 05909e0f724b439ebc0117ec73568bd5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.398408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05909e0f724b439ebc0117ec73568bd5 [ 803.445683] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Successfully created port: bf7efc6a-fe14-4763-9bd8-3116fd5d40e2 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.686243] env[62109]: DEBUG nova.scheduler.client.report [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.688743] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 8d75e2df68d24073b0a450dd720f410d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.700152] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d75e2df68d24073b0a450dd720f410d [ 803.862298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 1dfd7d93b6924d179891f6e43ff29827 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.891614] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] 
Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 803.893383] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 4a1b3fe243174235b2380712a0b9c95d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 803.904885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dfd7d93b6924d179891f6e43ff29827 [ 803.929147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a1b3fe243174235b2380712a0b9c95d [ 804.201878] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.202874] env[62109]: ERROR nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Traceback (most recent call last): [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.driver.spawn(context, instance, image_meta, [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] vm_ref = self.build_virtual_machine(instance, [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.202874] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] for vif in network_info: [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 804.203154] env[62109]: ERROR nova.compute.manager 
[instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self._sync_wrapper(fn, *args, **kwargs) [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.wait() [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self[:] = self._gt.wait() [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self._exit_event.wait() [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] result = hub.switch() [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.203154] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return self.greenlet.switch() [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] result = function(*args, **kwargs) [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] return func(*args, **kwargs) [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise e [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] nwinfo = self.network_api.allocate_for_instance( [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] created_port_ids = self._update_ports_for_instance( [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 
08638aac-2c6c-4580-9894-6b3b3c1ec484] with excutils.save_and_reraise_exception(): [ 804.203432] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] self.force_reraise() [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise self.value [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] updated_port = self._update_port( [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] _ensure_no_port_binding_failure(port) [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] raise exception.PortBindingFailed(port_id=port['id']) [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] nova.exception.PortBindingFailed: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. [ 804.203696] env[62109]: ERROR nova.compute.manager [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] [ 804.203932] env[62109]: DEBUG nova.compute.utils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 804.205708] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.841s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.212078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 8bb4b46a52344670b82206471545b7a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 804.215018] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Build of instance 08638aac-2c6c-4580-9894-6b3b3c1ec484 was re-scheduled: Binding failed for port ce1aee7f-24df-45b4-acff-b0bbb0657cbe, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 804.215732] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 804.216150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.216435] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquired lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.216599] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 804.217201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 48a85bebbfa34613a90d5d32325da752 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 804.226203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48a85bebbfa34613a90d5d32325da752 [ 804.257321] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bb4b46a52344670b82206471545b7a9 [ 804.290038] env[62109]: DEBUG nova.compute.manager [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service 
nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Received event network-changed-bf7efc6a-fe14-4763-9bd8-3116fd5d40e2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 804.290038] env[62109]: DEBUG nova.compute.manager [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Refreshing instance network info cache due to event network-changed-bf7efc6a-fe14-4763-9bd8-3116fd5d40e2. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 804.290038] env[62109]: DEBUG oslo_concurrency.lockutils [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] Acquiring lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.290038] env[62109]: DEBUG oslo_concurrency.lockutils [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] Acquired lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.292974] env[62109]: DEBUG nova.network.neutron [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Refreshing network info cache for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.292974] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] Expecting reply to msg 9f9aea18f312483f85414756bcaaa077 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 804.301465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f9aea18f312483f85414756bcaaa077 [ 804.367647] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 804.395693] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.396128] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.396220] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.396339] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 804.397018] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.397207] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.397421] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.397577] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.397740] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 
tempest-ServersTestJSON-1195313588-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.397987] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.398171] env[62109]: DEBUG nova.virt.hardware [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.401414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3056f4-6dbc-4330-9248-910a7bd4c882 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.412838] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac7aaec-51e6-4a03-90c6-f86b4f3983b0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.428881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.461877] env[62109]: ERROR nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. 
[ 804.461877] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.461877] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 804.461877] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 804.461877] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.461877] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.461877] env[62109]: ERROR nova.compute.manager raise self.value [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 804.461877] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.461877] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.461877] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.462295] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.462295] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.462295] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. 
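The traceback above bottoms out at nova/network/neutron.py:294, where _update_port() hands the port returned by Neutron to _ensure_no_port_binding_failure() and PortBindingFailed is raised. A minimal sketch of that check, assuming the failed binding is signalled through the port's binding:vif_type field (the helper and exception names come from the trace; the exact field test is an assumption):

    # Sketch of the check at nova/network/neutron.py:294 in the trace above.
    # Assumption: a failed binding is reported by Neutron as
    # binding:vif_type == 'binding_failed'; the real helper may inspect more.
    from nova import exception

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            # Produces the message seen in the log: "Binding failed for port
            # <port id>, please check neutron logs for more information."
            raise exception.PortBindingFailed(port_id=port['id'])

Because _update_port() runs this check for every port it touches, the same PortBindingFailed then resurfaces through _allocate_network_async and the spawn path, which is why the identical message repeats in the records that follow.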
[ 804.462295] env[62109]: ERROR nova.compute.manager [ 804.462295] env[62109]: Traceback (most recent call last): [ 804.462295] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.462295] env[62109]: listener.cb(fileno) [ 804.462295] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.462295] env[62109]: result = function(*args, **kwargs) [ 804.462295] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 804.462295] env[62109]: return func(*args, **kwargs) [ 804.462295] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.462295] env[62109]: raise e [ 804.462295] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.462295] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 804.462295] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 804.462295] env[62109]: created_port_ids = self._update_ports_for_instance( [ 804.462295] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 804.462295] env[62109]: with excutils.save_and_reraise_exception(): [ 804.462295] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.462295] env[62109]: self.force_reraise() [ 804.462295] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.462295] env[62109]: raise self.value [ 804.462295] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 804.462295] env[62109]: updated_port = self._update_port( [ 804.462295] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.462295] env[62109]: _ensure_no_port_binding_failure(port) [ 804.462295] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.462295] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.462981] env[62109]: nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. [ 804.462981] env[62109]: Removing descriptor: 16 [ 804.462981] env[62109]: ERROR nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. 
[ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Traceback (most recent call last): [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] yield resources [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.driver.spawn(context, instance, image_meta, [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.462981] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] vm_ref = self.build_virtual_machine(instance, [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] for vif in network_info: [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self._sync_wrapper(fn, *args, **kwargs) [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.wait() [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self[:] = self._gt.wait() [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self._exit_event.wait() [ 804.463329] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.463638] env[62109]: ERROR 
nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] result = hub.switch() [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self.greenlet.switch() [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] result = function(*args, **kwargs) [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return func(*args, **kwargs) [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise e [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] nwinfo = self.network_api.allocate_for_instance( [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 804.463638] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] created_port_ids = self._update_ports_for_instance( [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] with excutils.save_and_reraise_exception(): [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.force_reraise() [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise self.value [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] updated_port = self._update_port( [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.464044] 
env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] _ensure_no_port_binding_failure(port) [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.464044] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise exception.PortBindingFailed(port_id=port['id']) [ 804.464332] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. [ 804.464332] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] [ 804.464332] env[62109]: INFO nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Terminating instance [ 804.465506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquiring lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.741329] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.814984] env[62109]: DEBUG nova.network.neutron [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.846948] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.847604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 9591b622a3654d84b68e2d1b630cbe68 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 804.856024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9591b622a3654d84b68e2d1b630cbe68 [ 804.919163] env[62109]: DEBUG nova.network.neutron [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.919689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] Expecting reply to msg a6f8e03f746646279eb7c42d93bb92e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 804.927304] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6f8e03f746646279eb7c42d93bb92e9 [ 805.010813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de821c62-e0a1-4762-8e59-6527a8b57432 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.020185] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e5907d-4a4e-4c66-ae27-77b80f514ad0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.053829] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8227cd-a2e0-45f2-a466-0536d5930a59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.062702] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0360c950-cd03-4ab6-a175-17cff4794994 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.078700] env[62109]: DEBUG nova.compute.provider_tree [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.079244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg f5b7c3e098c74609a7354f8240763fb0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 805.089515] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
f5b7c3e098c74609a7354f8240763fb0 [ 805.349532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Releasing lock "refresh_cache-08638aac-2c6c-4580-9894-6b3b3c1ec484" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.349921] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 805.350182] env[62109]: DEBUG nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 805.350487] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 805.367034] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.367981] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 5e9869d9e24942a0a5ef470486047345 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 805.374940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e9869d9e24942a0a5ef470486047345 [ 805.421884] env[62109]: DEBUG oslo_concurrency.lockutils [req-38e3835b-e7b5-4306-ac92-65f31343e807 req-f1054877-5af2-4caa-be9e-16c70f5c8cd9 service nova] Releasing lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.422442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquired lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.422842] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.423443] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg a59c2396f3874c03a8910c091ab68597 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 805.430720] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a59c2396f3874c03a8910c091ab68597 [ 805.581686] env[62109]: DEBUG nova.scheduler.client.report [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.584088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 7850b5d8527a4dc082d63fa2db844240 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 805.599805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7850b5d8527a4dc082d63fa2db844240 [ 805.869513] env[62109]: DEBUG nova.network.neutron [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.870024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg bc8b5bd10cbb4655be43da0473f1dae9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 805.878322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc8b5bd10cbb4655be43da0473f1dae9 [ 805.938698] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.017030] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.017548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 6d5f5020e15248caa1b1cfd664c98227 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.025850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d5f5020e15248caa1b1cfd664c98227 [ 806.087000] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.087700] env[62109]: ERROR nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. 
[ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Traceback (most recent call last): [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.driver.spawn(context, instance, image_meta, [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] vm_ref = self.build_virtual_machine(instance, [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.087700] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] for vif in network_info: [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self._sync_wrapper(fn, *args, **kwargs) [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.wait() [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self[:] = self._gt.wait() [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self._exit_event.wait() [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] result = hub.switch() [ 806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
806.088034] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return self.greenlet.switch() [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] result = function(*args, **kwargs) [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] return func(*args, **kwargs) [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise e [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] nwinfo = self.network_api.allocate_for_instance( [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] created_port_ids = self._update_ports_for_instance( [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] with excutils.save_and_reraise_exception(): [ 806.088354] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] self.force_reraise() [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise self.value [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] updated_port = self._update_port( [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] _ensure_no_port_binding_failure(port) [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] raise exception.PortBindingFailed(port_id=port['id']) [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] nova.exception.PortBindingFailed: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. [ 806.088667] env[62109]: ERROR nova.compute.manager [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] [ 806.088902] env[62109]: DEBUG nova.compute.utils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 806.089714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.557s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.091184] env[62109]: INFO nova.compute.claims [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.092845] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 408db2f79da545369beab0e2bd5af1d3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.093984] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Build of instance 66a0a424-ecb6-43df-9b47-946ff1e1b7b2 was re-scheduled: Binding failed for port 9e03f450-e277-4f0a-98c6-050a2f8f2359, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 806.094414] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 806.094635] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquiring lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.094780] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Acquired lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.094935] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 806.095282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 09e28ea7a20c465cb4d43ac37f0122a8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.101099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09e28ea7a20c465cb4d43ac37f0122a8 [ 806.134437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 408db2f79da545369beab0e2bd5af1d3 [ 806.204494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.204714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.314612] env[62109]: DEBUG nova.compute.manager [req-c4f29840-5255-41c5-b4e0-97f5b001592d req-465cbf94-b1ff-4ac7-90de-229057af6137 service nova] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Received event network-vif-deleted-bf7efc6a-fe14-4763-9bd8-3116fd5d40e2 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 806.373651] env[62109]: INFO nova.compute.manager [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f 
tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 08638aac-2c6c-4580-9894-6b3b3c1ec484] Took 1.02 seconds to deallocate network for instance. [ 806.375006] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg c2ce7852d7834be89973597ee5b28188 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.409954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2ce7852d7834be89973597ee5b28188 [ 806.520687] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Releasing lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.521157] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.521430] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 806.521741] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c7675c8-e47e-4b30-9a9d-ef3e16eef2fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.530639] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055655df-1b01-4774-86b0-5dc8498320f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.555015] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7ec619c-1b00-4d58-a593-671c0139c4e3 could not be found. [ 806.555287] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 806.555427] env[62109]: INFO nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Took 0.03 seconds to destroy the instance on the hypervisor. 
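The WARNING just above shows the vmwareapi driver treating a VM that is missing from the backend as already destroyed: the InstanceNotFound from the lookup is swallowed, the instance is reported destroyed, and teardown moves on to network cleanup. A hedged sketch of that pattern (only the destroy step and the InstanceNotFound handling are taken from the log; the lookup helper is a hypothetical stand-in for the SearchIndex.FindAllByUuid call shown above):

    # Sketch of the tolerant destroy path seen in nova.virt.vmwareapi.vmops above.
    import logging

    from nova import exception

    LOG = logging.getLogger(__name__)

    def _find_vm_ref(session, instance_uuid):
        # Hypothetical lookup standing in for SearchIndex.FindAllByUuid; it
        # raises when vCenter has no VM for this instance, as in the log.
        raise exception.InstanceNotFound(instance_id=instance_uuid)

    def destroy_instance(session, instance_uuid):
        try:
            vm_ref = _find_vm_ref(session, instance_uuid)
            # ...power off and delete vm_ref here...
        except exception.InstanceNotFound:
            # "Instance does not exist on backend" in the WARNING above: treat
            # the VM as already gone so network deallocation can still run.
            LOG.warning("Instance %s does not exist on backend", instance_uuid)

The records that follow show exactly that continuation: _deallocate_network and deallocate_for_instance() run for c7ec619c-1b00-4d58-a593-671c0139c4e3 even though no VM was ever created on the hypervisor.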
[ 806.555688] env[62109]: DEBUG oslo.service.loopingcall [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.555877] env[62109]: DEBUG nova.compute.manager [-] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.555994] env[62109]: DEBUG nova.network.neutron [-] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.571274] env[62109]: DEBUG nova.network.neutron [-] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.571791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ead3ea16d8fe4005a1fe7807d959c848 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.579154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ead3ea16d8fe4005a1fe7807d959c848 [ 806.599423] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 0a20cdc6f55b49219e1a42000956c50e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.612285] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a20cdc6f55b49219e1a42000956c50e [ 806.617014] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.710371] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.710894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 0d8ad372981240ce8ce3e604ec02c05b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.719225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d8ad372981240ce8ce3e604ec02c05b [ 806.880169] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg ec232887034f498ea020f93df13ccde4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 806.914758] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec232887034f498ea020f93df13ccde4 [ 807.074225] env[62109]: DEBUG nova.network.neutron [-] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.074692] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 255502ab2d0d4ca1b2a062033dc57de4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.082485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 255502ab2d0d4ca1b2a062033dc57de4 [ 807.214514] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Releasing lock "refresh_cache-66a0a424-ecb6-43df-9b47-946ff1e1b7b2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.214733] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 807.214902] env[62109]: DEBUG nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 807.215070] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 807.228223] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.228748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg 275d987320394b6499be51ae084e7c1d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.236318] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 275d987320394b6499be51ae084e7c1d [ 807.378129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a627cad-a06a-4819-8667-052e8e6ebd9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.388250] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc43d35-f89d-41e0-bf4d-cf0d0063d7cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.424312] env[62109]: INFO nova.scheduler.client.report [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Deleted allocations for instance 08638aac-2c6c-4580-9894-6b3b3c1ec484 [ 807.437469] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05020eb-fbf7-4f4c-85ca-0e9dc9e3a6b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.440483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 1422dfa8fcc94341a9851fbd481fa439 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.445860] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff1c88b-18bb-44e2-9e0a-47bf936ea184 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.463607] env[62109]: DEBUG nova.compute.provider_tree [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed in 
ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.464137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg abc53869bd8449c4882130adca9007b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.465142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1422dfa8fcc94341a9851fbd481fa439 [ 807.472458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abc53869bd8449c4882130adca9007b8 [ 807.576779] env[62109]: INFO nova.compute.manager [-] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Took 1.02 seconds to deallocate network for instance. [ 807.579381] env[62109]: DEBUG nova.compute.claims [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 807.579559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.731514] env[62109]: DEBUG nova.network.neutron [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.732132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg be2e78ab2b2848c2bc171c419f46d4d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.740721] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be2e78ab2b2848c2bc171c419f46d4d7 [ 807.942848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-101896f4-f52c-4a4f-94f6-1fd61758c65f tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "08638aac-2c6c-4580-9894-6b3b3c1ec484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 162.004s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.943548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg cf2eac5e41744a0290aa16dee939a68a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.952721] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf2eac5e41744a0290aa16dee939a68a [ 807.966769] env[62109]: DEBUG nova.scheduler.client.report [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 
tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 807.969149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 70fd34e308494fe7ad3ed265b792957e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 807.981330] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70fd34e308494fe7ad3ed265b792957e [ 808.233920] env[62109]: INFO nova.compute.manager [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] [instance: 66a0a424-ecb6-43df-9b47-946ff1e1b7b2] Took 1.02 seconds to deallocate network for instance. [ 808.235768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg b8a1290a38444a268d8426b17fbd30a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.278573] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8a1290a38444a268d8426b17fbd30a9 [ 808.447015] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 808.448985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 3af196b05f99473f8df917192b4297e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.472215] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.472726] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 808.474467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg d7401ca3a8244407b148d06fc595ae5b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.475455] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.024s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.477138] env[62109]: INFO nova.compute.claims [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.478693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg ed0cc246fe4446569a2d56b17d652fef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.511218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3af196b05f99473f8df917192b4297e5 [ 808.524647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7401ca3a8244407b148d06fc595ae5b [ 808.527899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed0cc246fe4446569a2d56b17d652fef [ 808.740067] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg ce770725c65e466d857c471eac8cd518 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.799302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce770725c65e466d857c471eac8cd518 [ 808.968872] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.981453] env[62109]: DEBUG nova.compute.utils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 808.982075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 5083f259ad074eb19b81527000003ff4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.983951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] 
Expecting reply to msg ebae2048f7cb4b6c8a5b2c76e7d834c5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 808.991487] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 808.991671] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 808.993749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebae2048f7cb4b6c8a5b2c76e7d834c5 [ 808.994274] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5083f259ad074eb19b81527000003ff4 [ 808.997426] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 808.999143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg cd3af2a8195b419c99663a21a462c5bc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.031958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd3af2a8195b419c99663a21a462c5bc [ 809.071409] env[62109]: DEBUG nova.policy [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2857ea8202fc46b09da06c4ac904df95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f74de90347d44395b052d75738d6a065', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 809.241974] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a728f90-4772-4a19-a4cf-ce089251829b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.252525] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7085bd-3df2-460f-9360-c7646f005013 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.286515] env[62109]: INFO nova.scheduler.client.report [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Deleted allocations for instance 66a0a424-ecb6-43df-9b47-946ff1e1b7b2 [ 809.292940] 
env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Expecting reply to msg ab76123ce00f4b7abe9d58152dd60ae4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.294618] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7613b370-a771-4161-b212-3c565ddb0b93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.302455] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbffe2d9-be4c-4ca9-8cf5-234c3cfb7d2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.308390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab76123ce00f4b7abe9d58152dd60ae4 [ 809.319481] env[62109]: DEBUG nova.compute.provider_tree [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.319977] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 30680070a12b43ef8b8b4a6a431aa173 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.336679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30680070a12b43ef8b8b4a6a431aa173 [ 809.503523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg b4eecc5af7604ae9ae052c4d3dcf7afb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.537565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4eecc5af7604ae9ae052c4d3dcf7afb [ 809.616864] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Successfully created port: bb0cdfe1-f3b4-46b1-a6e1-1170d0541113 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.794656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-aa878a52-ac9d-4ae5-8cc8-b3ed9736f7af tempest-SecurityGroupsTestJSON-704559567 tempest-SecurityGroupsTestJSON-704559567-project-member] Lock "66a0a424-ecb6-43df-9b47-946ff1e1b7b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.084s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.795259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5a18e477d1594828abb4c1c806e03d02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.806166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
5a18e477d1594828abb4c1c806e03d02 [ 809.822025] env[62109]: DEBUG nova.scheduler.client.report [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.824329] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg f13f8c169bbf4d4b84fbdd4cc6067ddd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 809.835396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13f8c169bbf4d4b84fbdd4cc6067ddd [ 810.007216] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 810.027430] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 810.027678] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 810.027851] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.028083] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Flavor pref 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 810.028251] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.028397] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 810.028599] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 810.028755] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 810.028918] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 810.029108] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 810.029277] env[62109]: DEBUG nova.virt.hardware [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 810.030210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2632c60-6e89-46de-88cd-dccae5302707 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.038818] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e040ee-d0ee-44a0-b6a9-074db2e709e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.298378] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 810.300381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 573825bda56e4b6dafa3dc10987807da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.327164] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.852s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.327707] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 810.329417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 3a953f31a47548399777b743fe236c80 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.330369] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.847s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.331100] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 58e412a2e27943bea475291ed311cd18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.333293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 573825bda56e4b6dafa3dc10987807da [ 810.356905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58e412a2e27943bea475291ed311cd18 [ 810.363608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a953f31a47548399777b743fe236c80 [ 810.465636] env[62109]: DEBUG nova.compute.manager [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Received event network-changed-bb0cdfe1-f3b4-46b1-a6e1-1170d0541113 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 810.465847] env[62109]: DEBUG nova.compute.manager [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Refreshing instance network info cache due to event network-changed-bb0cdfe1-f3b4-46b1-a6e1-1170d0541113. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 810.466094] env[62109]: DEBUG oslo_concurrency.lockutils [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] Acquiring lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.466240] env[62109]: DEBUG oslo_concurrency.lockutils [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] Acquired lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.466400] env[62109]: DEBUG nova.network.neutron [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Refreshing network info cache for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 810.466818] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] Expecting reply to msg 65ad712f504641f7b5bf6fec5c9aa7d5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.473328] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65ad712f504641f7b5bf6fec5c9aa7d5 [ 810.652303] env[62109]: ERROR nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. 
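The tracebacks that follow trace the failure path shown in this ERROR entry: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), which raises PortBindingFailed. As a minimal, hypothetical sketch of that final guard (assuming, as Neutron does for unbindable ports, that the port dict carries binding:vif_type set to 'binding_failed'):

```python
# Simplified, illustrative version of the guard named in the traceback below
# (nova/network/neutron.py:_ensure_no_port_binding_failure); not Nova's exact code.
# Assumption: Neutron marks an unbindable port with binding:vif_type = 'binding_failed'.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reported that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```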
[ 810.652303] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.652303] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.652303] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.652303] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.652303] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.652303] env[62109]: ERROR nova.compute.manager raise self.value [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.652303] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 810.652303] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.652303] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 810.652692] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.652692] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 810.652692] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. 
[ 810.652692] env[62109]: ERROR nova.compute.manager [ 810.652692] env[62109]: Traceback (most recent call last): [ 810.652692] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 810.652692] env[62109]: listener.cb(fileno) [ 810.652692] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.652692] env[62109]: result = function(*args, **kwargs) [ 810.652692] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.652692] env[62109]: return func(*args, **kwargs) [ 810.652692] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.652692] env[62109]: raise e [ 810.652692] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.652692] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 810.652692] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.652692] env[62109]: created_port_ids = self._update_ports_for_instance( [ 810.652692] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.652692] env[62109]: with excutils.save_and_reraise_exception(): [ 810.652692] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.652692] env[62109]: self.force_reraise() [ 810.652692] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.652692] env[62109]: raise self.value [ 810.652692] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.652692] env[62109]: updated_port = self._update_port( [ 810.652692] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.652692] env[62109]: _ensure_no_port_binding_failure(port) [ 810.652692] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.652692] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 810.653405] env[62109]: nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. [ 810.653405] env[62109]: Removing descriptor: 19 [ 810.653405] env[62109]: ERROR nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. 
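The instance-level traceback that follows shows why the error surfaces during spawn rather than during allocation: the port allocation was started in a background greenthread earlier (_allocate_network_async, logged at 808.991), and the PortBindingFailed is only re-raised when the VMware driver first iterates network_info in get_vif_info(). A rough, hypothetical analogy of that deferred-failure pattern, using concurrent.futures in place of Nova's eventlet-based NetworkInfoAsyncWrapper:

```python
# Illustrative analogy only: Nova uses eventlet greenthreads and a
# NetworkInfoAsyncWrapper; here a Future stands in for the async result.
from concurrent.futures import ThreadPoolExecutor


def allocate_ports():
    # Stand-in for network_api.allocate_for_instance(); fails as in the log above.
    raise RuntimeError("Binding failed for port bb0cdfe1-...")


executor = ThreadPoolExecutor(max_workers=1)
future = executor.submit(allocate_ports)   # kicked off early, in the background

# ... the build continues; the error stays hidden until the result is consumed ...

try:
    network_info = future.result()          # analogous to network_info.wait() in the driver
except RuntimeError as exc:
    print(f"Instance failed to spawn: {exc}")  # mirrors the ERROR entries above
finally:
    executor.shutdown()
```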
[ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Traceback (most recent call last): [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] yield resources [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.driver.spawn(context, instance, image_meta, [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.653405] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] vm_ref = self.build_virtual_machine(instance, [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] for vif in network_info: [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self._sync_wrapper(fn, *args, **kwargs) [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.wait() [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self[:] = self._gt.wait() [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self._exit_event.wait() [ 810.653725] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 810.654002] env[62109]: ERROR 
nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] result = hub.switch() [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self.greenlet.switch() [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] result = function(*args, **kwargs) [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return func(*args, **kwargs) [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise e [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] nwinfo = self.network_api.allocate_for_instance( [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.654002] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] created_port_ids = self._update_ports_for_instance( [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] with excutils.save_and_reraise_exception(): [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.force_reraise() [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise self.value [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] updated_port = self._update_port( [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.654310] 
env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] _ensure_no_port_binding_failure(port) [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.654310] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise exception.PortBindingFailed(port_id=port['id']) [ 810.654565] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. [ 810.654565] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] [ 810.654565] env[62109]: INFO nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Terminating instance [ 810.656152] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.681194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "3f99ec88-f05f-4583-b08b-d40fb37e275e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.681809] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "3f99ec88-f05f-4583-b08b-d40fb37e275e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.821454] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.834882] env[62109]: DEBUG nova.compute.utils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.835495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg d3874fcf90354562bb89d9a2def61331 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.839241] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 642a87dd31404ec799e0e5d1f68539d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 810.840352] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 810.850779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3874fcf90354562bb89d9a2def61331 [ 810.851279] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 642a87dd31404ec799e0e5d1f68539d9 [ 810.983890] env[62109]: DEBUG nova.network.neutron [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.060852] env[62109]: DEBUG nova.network.neutron [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.061463] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] Expecting reply to msg c609446b5ffa4e2eb477d380d1334e77 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.069735] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c609446b5ffa4e2eb477d380d1334e77 [ 811.337446] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 811.339148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg bd6c701bb3e3494eb85799faff4abec7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.363014] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.363188] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.363317] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a6ec5486-0843-4c38-b187-35d5296965a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.363415] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance c7ec619c-1b00-4d58-a593-671c0139c4e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.363528] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a04d014e-bed6-4e4b-a5eb-316d88c174f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.363641] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance b95c60dc-50c4-4afc-acb0-3308e490b808 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.364229] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg e230fd9340ef48eb81f50257b704ee1a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.382163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e230fd9340ef48eb81f50257b704ee1a [ 811.382163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd6c701bb3e3494eb85799faff4abec7 [ 811.563937] env[62109]: DEBUG oslo_concurrency.lockutils [req-96958567-9b7b-423a-ac44-b03b4402a258 req-34501ecb-83d1-4884-81b7-8a11bb4447ea service nova] Releasing lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.564376] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquired lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.564581] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.565118] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting 
reply to msg 7b8c8d8309e449f99797eecdae681daa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.572481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b8c8d8309e449f99797eecdae681daa [ 811.843660] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 8094b0541bc843df8e9054450407afce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.866871] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 900e1e1e-5635-4782-bd87-046dd2af7dad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.867444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 488ad66b8d244bc297d23d5589c7768d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 811.873789] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8094b0541bc843df8e9054450407afce [ 811.877251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 488ad66b8d244bc297d23d5589c7768d [ 812.085480] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.206442] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.206780] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 6c0d05b6ee8144bf9b073e3f85ccba04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 812.215541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c0d05b6ee8144bf9b073e3f85ccba04 [ 812.346998] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 812.370014] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance afc5587e-7fd5-4b07-aff8-98ef8358985f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.370580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg dcef6d02359747a8b139cfe416629f33 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 812.373213] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 812.373421] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 812.374136] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.374136] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 812.374136] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.374136] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 812.374335] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 812.374364] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 
tempest-ServerShowV247Test-1781566314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 812.374503] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 812.374661] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 812.374821] env[62109]: DEBUG nova.virt.hardware [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.375894] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bc2ed4-268a-45cf-9189-08f6c5bf027f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.380867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcef6d02359747a8b139cfe416629f33 [ 812.385078] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c74da12-6bf1-4391-8cd4-09ddc4b511fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.404090] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.412042] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Creating folder: Project (c0b7ebae5346428b85094247a63eb203). Parent ref: group-v108864. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 812.412823] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea5620ad-ed90-41a4-9a80-ff3b8af8e4d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.425338] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Created folder: Project (c0b7ebae5346428b85094247a63eb203) in parent group-v108864. [ 812.425530] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Creating folder: Instances. Parent ref: group-v108875. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 812.425896] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14561f22-1d07-4937-8362-8c6bfe805857 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.434887] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Created folder: Instances in parent group-v108875. [ 812.435107] env[62109]: DEBUG oslo.service.loopingcall [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.435284] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 812.435467] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90e378a5-6164-4d39-a416-4e3c72bd0e48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.452308] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.452308] env[62109]: value = "task-401470" [ 812.452308] env[62109]: _type = "Task" [ 812.452308] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.459839] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401470, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.518528] env[62109]: DEBUG nova.compute.manager [req-5f8f806d-d38e-4ef6-9749-7d85d34b2ea3 req-28627389-0ca1-4889-bc6e-bda99a39544f service nova] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Received event network-vif-deleted-bb0cdfe1-f3b4-46b1-a6e1-1170d0541113 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 812.709236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Releasing lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.709727] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 812.709937] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.710268] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0575f928-eb0a-4fd2-9160-64978c871863 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.720713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf68667-1124-4dec-a4f7-c4ec4830a3e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.745187] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a04d014e-bed6-4e4b-a5eb-316d88c174f0 could not be found. [ 812.745449] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.745635] env[62109]: INFO nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 812.745958] env[62109]: DEBUG oslo.service.loopingcall [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.746204] env[62109]: DEBUG nova.compute.manager [-] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.746298] env[62109]: DEBUG nova.network.neutron [-] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.764468] env[62109]: DEBUG nova.network.neutron [-] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance cache missing network info.
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.764965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 09c8d3e044554d8fbd3dc0d1e830fee5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 812.774260] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09c8d3e044554d8fbd3dc0d1e830fee5 [ 812.880849] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.880849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 59291517096343fbb239f95c7882e47e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 812.890815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59291517096343fbb239f95c7882e47e [ 812.962642] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401470, 'name': CreateVM_Task, 'duration_secs': 0.263912} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.962945] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 812.963511] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.963819] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.964333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 812.965004] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc5a2c6a-af4b-48c4-b8c3-0eda09e52d68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.969890] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 812.969890] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52f62d88-b35b-1829-048d-99217cefdc75" [ 
812.969890] env[62109]: _type = "Task" [ 812.969890] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.977745] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52f62d88-b35b-1829-048d-99217cefdc75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.266652] env[62109]: DEBUG nova.network.neutron [-] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.267159] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7aaa6fabc23c4332ba2a5f8efaafd17e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 813.277244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aaa6fabc23c4332ba2a5f8efaafd17e [ 813.382899] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance aa1afca5-8194-4a9d-bcd0-e3e91c15338c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.383476] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f95c48c85dcd469faccd425d978215b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 813.393886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f95c48c85dcd469faccd425d978215b8 [ 813.488224] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52f62d88-b35b-1829-048d-99217cefdc75, 'name': SearchDatastore_Task, 'duration_secs': 0.009463} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.488224] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.488224] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.488224] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.488493] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.488493] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.488493] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfba1e44-dd57-4b19-b190-32917aa5a8f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.490081] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 813.490231] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 813.490980] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-803b4bab-3cfc-4864-ac29-5f8bb5c2b8ad {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.496404] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 813.496404] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5208852e-e4f9-8c61-0377-d00fbb1cca19" [ 813.496404] env[62109]: _type = "Task" [ 813.496404] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.504163] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5208852e-e4f9-8c61-0377-d00fbb1cca19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.780582] env[62109]: INFO nova.compute.manager [-] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Took 1.03 seconds to deallocate network for instance. [ 813.780582] env[62109]: DEBUG nova.compute.claims [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 813.780582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.886831] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 732cf1e3-823d-4769-ad16-f5b492be53d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.887640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 216cb0369bad4ecd9e198343b602bf6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 813.900069] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 216cb0369bad4ecd9e198343b602bf6d [ 814.006469] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5208852e-e4f9-8c61-0377-d00fbb1cca19, 'name': SearchDatastore_Task, 'duration_secs': 0.008133} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.007241] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f29942f-8e07-410f-80cc-e43009ed041d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.012297] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 814.012297] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5281c058-1c28-2735-dc14-4698e3512f47" [ 814.012297] env[62109]: _type = "Task" [ 814.012297] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.019387] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5281c058-1c28-2735-dc14-4698e3512f47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.390588] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 436788b9-92bb-4088-9c24-c2e9a073c09d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.391201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg d5e3a9d540464b79b31499e904a0f491 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 814.401395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5e3a9d540464b79b31499e904a0f491 [ 814.523148] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5281c058-1c28-2735-dc14-4698e3512f47, 'name': SearchDatastore_Task, 'duration_secs': 0.008115} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.523399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.523644] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b95c60dc-50c4-4afc-acb0-3308e490b808/b95c60dc-50c4-4afc-acb0-3308e490b808.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 814.523890] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbbf5a01-1dea-4b33-9c41-22f41caa60dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.532135] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 814.532135] env[62109]: value = "task-401471" [ 814.532135] env[62109]: _type = "Task" [ 814.532135] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.540831] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.898243] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 53d6d89d-04bb-421d-994c-014830491dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.898883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8f3bf0481cf74b968773e7343524d217 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 814.909961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f3bf0481cf74b968773e7343524d217 [ 815.042264] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447658} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.042610] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b95c60dc-50c4-4afc-acb0-3308e490b808/b95c60dc-50c4-4afc-acb0-3308e490b808.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 815.042821] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.043385] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e179815c-5184-42fd-9568-d6cdac2f1e99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.055276] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 815.055276] env[62109]: value = "task-401472" [ 815.055276] env[62109]: _type = "Task" [ 815.055276] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.057416] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.401363] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3ada5090-7219-4835-b508-2188501ae5e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.401954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg a3ccefaf3abb49e5bb04d14dca2a3292 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 815.419415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3ccefaf3abb49e5bb04d14dca2a3292 [ 815.561485] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088951} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.561769] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 815.563220] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833a9600-fdd1-4303-a589-4eeb1f6bbf20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.583134] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] b95c60dc-50c4-4afc-acb0-3308e490b808/b95c60dc-50c4-4afc-acb0-3308e490b808.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.583495] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1b71717-b2a3-4111-880f-e93e2cfa10c2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.605588] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 815.605588] env[62109]: value = "task-401473" [ 815.605588] env[62109]: _type = "Task" [ 815.605588] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.613790] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.913755] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 6163fcd4-cfe4-4432-ba8d-665319fa11ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.914351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 82d9639070df4a15a4519ddb05d4dd6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 815.934310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82d9639070df4a15a4519ddb05d4dd6d [ 816.115066] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401473, 'name': ReconfigVM_Task, 'duration_secs': 0.267798} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.115472] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Reconfigured VM instance instance-00000039 to attach disk [datastore1] b95c60dc-50c4-4afc-acb0-3308e490b808/b95c60dc-50c4-4afc-acb0-3308e490b808.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.116524] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61d16fd8-eea9-46b8-86cc-54f1988e99b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.123434] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 816.123434] env[62109]: value = "task-401474" [ 816.123434] env[62109]: _type = "Task" [ 816.123434] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.131514] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401474, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.421075] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 252b7e84-4f91-4078-a81c-392d622b6ce2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.421692] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg bed6b20597a941569030b7ccd8a82439 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 816.439078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bed6b20597a941569030b7ccd8a82439 [ 816.541388] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "1a38b70f-eabe-4b11-a371-cf971184211f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.541659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "1a38b70f-eabe-4b11-a371-cf971184211f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.573913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "8a6d10c8-bd2b-40dd-9897-8f30223abe81" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.574137] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "8a6d10c8-bd2b-40dd-9897-8f30223abe81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.632810] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401474, 'name': Rename_Task, 'duration_secs': 0.140106} completed successfully.
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.633079] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 816.633313] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e70587bd-f7ee-43d0-9df4-0e2d1cf4da51 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.640082] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 816.640082] env[62109]: value = "task-401475" [ 816.640082] env[62109]: _type = "Task" [ 816.640082] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.647355] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.928648] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.928648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 797cf553b5c4448f89ff6109b5e07d48 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 816.935102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 797cf553b5c4448f89ff6109b5e07d48 [ 817.149664] env[62109]: DEBUG oslo_vmware.api [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401475, 'name': PowerOnVM_Task, 'duration_secs': 0.437681} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.149922] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 817.150166] env[62109]: INFO nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Took 4.80 seconds to spawn the instance on the hypervisor. 
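The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same invoke-then-poll pattern that oslo.vmware exposes. The sketch below is not Nova's own code, only a minimal illustration of that pattern; the host, credentials and managed-object references are placeholders.

    from oslo_vmware import api

    def connect(host, user, password):
        # Positional arguments: host, username, password, API retry count,
        # task poll interval in seconds (the values here are placeholders).
        return api.VMwareAPISession(host, user, password, 10, 1.0)

    def create_and_power_on_vm(session, folder_ref, config_spec, res_pool_ref):
        # Folder.CreateVM_Task returns a Task managed object; wait_for_task()
        # polls it (the "progress is 0%" / "completed successfully" entries
        # above) and returns the task info once the task finishes.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        task_info = session.wait_for_task(task)
        vm_ref = task_info.result  # reference to the newly created VM

        # Powering the VM on uses the same invoke -> wait pattern.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)
        return vm_ref

Each wait_for_task() call is what produces the paired "Waiting for the task" / "Task: {...} completed successfully" entries and their duration_secs values in the log.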
[ 817.150294] env[62109]: DEBUG nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 817.151295] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b244f90-09a5-4933-958d-5017d5be74cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.162753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 1a78a263bf114d7ca941b5c4e745d20a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 817.198167] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a78a263bf114d7ca941b5c4e745d20a [ 817.427336] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance f453b695-8abd-44fa-8468-75c6aaeec19a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.427918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg f6bb81007b0d468e9d82a923c76ec7e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 817.438054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6bb81007b0d468e9d82a923c76ec7e7 [ 817.672228] env[62109]: INFO nova.compute.manager [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Took 24.24 seconds to build instance. [ 817.672584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg b85e762745e14562baac7697000a4d12 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 817.685784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b85e762745e14562baac7697000a4d12 [ 817.930477] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance e2e09174-6ba1-44ad-ba3e-cdcae5a2d698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 817.931701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 00022fdf87c844d49d29ed064301c102 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 817.941282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00022fdf87c844d49d29ed064301c102 [ 818.174423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-a8cd4b97-b5aa-417a-8297-971fb71887bb tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 132.821s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.175158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 493aad443a034292b64a8f5a0f9ff98f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 818.186708] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 493aad443a034292b64a8f5a0f9ff98f [ 818.434424] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 309a7bae-82f5-4b9e-ac86-e0f1803f2585 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.434964] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8a63021a4f4e406c9ad5ac4ecb1ba04f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 818.445146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a63021a4f4e406c9ad5ac4ecb1ba04f [ 818.677961] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 818.680177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg ebbf93cc5ea44cd6bae8cd6fedab1e17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 818.712294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebbf93cc5ea44cd6bae8cd6fedab1e17 [ 818.936969] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 3f99ec88-f05f-4583-b08b-d40fb37e275e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 818.937238] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 818.937389] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 819.198471] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.200110] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056cdfb3-48ad-4f44-b40e-616529e15757 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.207750] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8f1132-56b1-44ce-96d9-f844ca8f6a89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.237261] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a5f7c7-c966-4989-a5fd-8b301abe6165 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.244630] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee835d7f-0ee5-4c87-996b-3a4c55f23146 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.257736] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.258322] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ddd794090cf24d748941d300bb36dc35 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 819.265664] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddd794090cf24d748941d300bb36dc35 [ 819.761286] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.763746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg eaa4c88bd77d4148b434842a9c5d38eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 819.775142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa4c88bd77d4148b434842a9c5d38eb [ 820.266649] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 820.266969] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.937s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.267289] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.171s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.269234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg a45c58026ac4452db014f35f15d85c4f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 820.270382] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.270574] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Cleaning up deleted instances {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 820.271781] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg fe4cf4e8367544a59dcaad83b70b8167 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 820.291564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe4cf4e8367544a59dcaad83b70b8167 [ 820.302274] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a45c58026ac4452db014f35f15d85c4f [ 820.778529] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] There are 2 instances to clean {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 820.778529] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 9ebd0ef6-4a2d-414e-88e9-9b3cc739bb55] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 820.778529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 147f9608bd7040d2a961dd419deb0f09 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 820.822504] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 147f9608bd7040d2a961dd419deb0f09 [ 821.047164] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8d502a-82dd-4c65-9e86-007a7f58d749 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.055256] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e98e3d9-54d0-41e8-8ff8-23a31277a430 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.089497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c116925-c331-4561-aa44-382a91effa1f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.099384] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06012a3-cf23-4d1f-bbea-b9d2f26034d2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.112737] env[62109]: DEBUG nova.compute.provider_tree [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.113404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg a2ade843ae184e8a9daff87febb07c2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 821.120984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2ade843ae184e8a9daff87febb07c2a [ 821.280602] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 7bac3f71-a8a9-47fb-81c8-1b2b7fbdde8f] Instance has had 0 of 5 cleanup attempts {{(pid=62109) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 821.281951] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg c8a6777f220942b3ababf2cf2678dce5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 821.302436] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8a6777f220942b3ababf2cf2678dce5 [ 821.622392] env[62109]: DEBUG nova.scheduler.client.report [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.625077] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 3ba8cb80dedf44609c71bc62064b6307 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 821.635989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ba8cb80dedf44609c71bc62064b6307 [ 821.797230] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.797230] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Cleaning up deleted instances with incomplete migration {{(pid=62109) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11264}} [ 821.798432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 3aa1efbe32cc415e9909f4761ab2d524 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 821.807117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3aa1efbe32cc415e9909f4761ab2d524 [ 822.131771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.131771] env[62109]: ERROR nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. 
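The PortBindingFailed above is raised by Nova's guard against ports that Neutron reported but could not bind; the traceback that follows shows the exact call chain. A simplified paraphrase of that guard (not the verbatim helper in nova/network/neutron.py) looks like this:

    from nova import exception

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it failed to bind with binding:vif_type set to
        # 'binding_failed'; raising here turns that into the explicit
        # PortBindingFailed error recorded in this log.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

In this run the unbindable port is a1df0846-1c39-4992-9a51-d7ac2e394a3c, so the build of instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 is aborted and its compute_resources claim released, as the lockutils entries above record.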
[ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Traceback (most recent call last): [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.driver.spawn(context, instance, image_meta, [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self._vmops.spawn(context, instance, image_meta, injected_files, [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 822.131771] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] vm_ref = self.build_virtual_machine(instance, [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] vif_infos = vmwarevif.get_vif_info(self._session, [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] for vif in network_info: [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return self._sync_wrapper(fn, *args, **kwargs) [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.wait() [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self[:] = self._gt.wait() [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return self._exit_event.wait() [ 822.132313] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] current.throw(*self._exc) [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] result = function(*args, **kwargs) [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] return func(*args, **kwargs) [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise e [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] nwinfo = self.network_api.allocate_for_instance( [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] created_port_ids = self._update_ports_for_instance( [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 822.132675] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] with excutils.save_and_reraise_exception(): [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] self.force_reraise() [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise self.value [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] updated_port = self._update_port( [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] _ensure_no_port_binding_failure(port) [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] raise exception.PortBindingFailed(port_id=port['id']) [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] nova.exception.PortBindingFailed: Binding failed for 
port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. [ 822.133080] env[62109]: ERROR nova.compute.manager [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] [ 822.133396] env[62109]: DEBUG nova.compute.utils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 822.133396] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.891s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.136376] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 2e9ea0c7f46e48b691ba7901eb7f3e5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 822.136376] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Build of instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 was re-scheduled: Binding failed for port a1df0846-1c39-4992-9a51-d7ac2e394a3c, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 822.136738] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 822.137056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.137363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.137651] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 822.138264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 23b8183b4d1b4e0db3f13f4c4f7696c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 822.165023] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23b8183b4d1b4e0db3f13f4c4f7696c0 [ 822.177123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e9ea0c7f46e48b691ba7901eb7f3e5c [ 822.301003] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.301003] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 451dccf6e8864e41b98db1d39d7c2a97 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 822.308055] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451dccf6e8864e41b98db1d39d7c2a97 [ 822.657907] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.744486] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.745040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 49a2f2d995fe44e19439c941e857a695 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 822.753858] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49a2f2d995fe44e19439c941e857a695 [ 822.890393] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d1c0d5-7633-4fda-bd5c-b166cc59e0fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.897822] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d49a2b-fe2d-474c-92be-00c29296b453 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.928344] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b34e144-e1f8-42b9-ab22-638681aca1ae {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.935391] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122b6901-db81-483d-9a43-a3fe9ca0097f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.948364] env[62109]: DEBUG nova.compute.provider_tree [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.948926] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 33a9c97fd5a74b8db0e58daf73536835 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 822.959919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33a9c97fd5a74b8db0e58daf73536835 [ 823.247850] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-e8c77459-e3a3-4a68-9f76-0757dd0f2587" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.248162] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 823.248364] env[62109]: DEBUG nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 823.248532] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 823.260673] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 823.261181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 778da92e5cc24f14b20d746bf5f23aea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 823.268825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 778da92e5cc24f14b20d746bf5f23aea [ 823.452031] env[62109]: DEBUG nova.scheduler.client.report [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.454889] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 59cfbbb0974f4340aaa12942ff17075e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 823.465925] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59cfbbb0974f4340aaa12942ff17075e [ 823.763396] env[62109]: DEBUG nova.network.neutron [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.763923] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 
2de90bbc4b5746c9a9e8c4e533251549 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 823.772049] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2de90bbc4b5746c9a9e8c4e533251549 [ 823.960049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.828s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.960049] env[62109]: ERROR nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Traceback (most recent call last): [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.driver.spawn(context, instance, image_meta, [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self._vmops.spawn(context, instance, image_meta, injected_files, [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 823.960049] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] vm_ref = self.build_virtual_machine(instance, [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] vif_infos = vmwarevif.get_vif_info(self._session, [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] for vif in network_info: [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return self._sync_wrapper(fn, *args, **kwargs) [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.wait() [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 
9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self[:] = self._gt.wait() [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return self._exit_event.wait() [ 823.960599] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] current.throw(*self._exc) [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] result = function(*args, **kwargs) [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] return func(*args, **kwargs) [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise e [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] nwinfo = self.network_api.allocate_for_instance( [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] created_port_ids = self._update_ports_for_instance( [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 823.960939] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] with excutils.save_and_reraise_exception(): [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] self.force_reraise() [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise self.value [ 823.961310] env[62109]: ERROR nova.compute.manager 
[instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] updated_port = self._update_port( [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] _ensure_no_port_binding_failure(port) [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] raise exception.PortBindingFailed(port_id=port['id']) [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] nova.exception.PortBindingFailed: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. [ 823.961310] env[62109]: ERROR nova.compute.manager [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] [ 823.961586] env[62109]: DEBUG nova.compute.utils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 823.961586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.869s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.963015] env[62109]: INFO nova.compute.claims [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.964580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 4828f996dd15482bb9ab61b1b7b3c08a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 823.965685] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Build of instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 was re-scheduled: Binding failed for port f971e1a0-a370-44fc-acd0-66f2e04e3b60, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 823.966108] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 823.966329] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.966474] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.966630] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 823.967013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 7f4985b4334645d7b5575dfe78472bdc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 823.974025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f4985b4334645d7b5575dfe78472bdc [ 823.997196] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4828f996dd15482bb9ab61b1b7b3c08a [ 824.266011] env[62109]: INFO nova.compute.manager [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: e8c77459-e3a3-4a68-9f76-0757dd0f2587] Took 1.02 seconds to deallocate network for instance. 
[ 824.267751] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 0336d15c00d745cfae24a72b8949f9e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 824.302104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0336d15c00d745cfae24a72b8949f9e9 [ 824.469780] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 3e17f3728ac34374b36819c18fedd8df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 824.477154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e17f3728ac34374b36819c18fedd8df [ 824.484829] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 824.567037] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.567703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg e73e2fb5b6d246208b14c0e03150f76c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 824.577816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e73e2fb5b6d246208b14c0e03150f76c [ 824.772018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 387a741427044aa8a29e185b492cc143 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 824.801285] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 387a741427044aa8a29e185b492cc143 [ 825.070808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-9a1c4327-64b3-4c4d-b6ae-77959084b405" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.071108] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 825.071367] env[62109]: DEBUG nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 825.071600] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 825.086070] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 825.086614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg f7ea13e407874ea485e3bf5fcfb11caf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.092830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7ea13e407874ea485e3bf5fcfb11caf [ 825.217393] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba70046-07fc-4adf-945a-707be21cb5b6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.224871] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323336ea-18c9-40ee-8afa-e57a4cc528fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.253733] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdadd282-786d-40a9-a9bd-b8ea0106a582 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.260529] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf95e22-9e94-4c2e-bae4-44eebd6e9e6f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.273083] env[62109]: DEBUG nova.compute.provider_tree [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.273579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg fd93c349cf6b4654bd589c82e32f1cda in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.282098] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
fd93c349cf6b4654bd589c82e32f1cda [ 825.293600] env[62109]: INFO nova.scheduler.client.report [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Deleted allocations for instance e8c77459-e3a3-4a68-9f76-0757dd0f2587 [ 825.299956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 6bc156bb34a646559e98a85885ea6833 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.311351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bc156bb34a646559e98a85885ea6833 [ 825.588551] env[62109]: DEBUG nova.network.neutron [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.589164] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg ea2670a0723e49728146f9ad263adbff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.597448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea2670a0723e49728146f9ad263adbff [ 825.778536] env[62109]: DEBUG nova.scheduler.client.report [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.780911] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 4df8ce0fcf86459796b9d81a6fd8c62f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.791843] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4df8ce0fcf86459796b9d81a6fd8c62f [ 825.802206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-3d82fb94-09c2-4954-8f91-55aeb880b429 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "e8c77459-e3a3-4a68-9f76-0757dd0f2587" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 168.371s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.802707] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to 
msg 58c891cc8efd4033b6aeb0c03fd8c1b9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 825.811985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58c891cc8efd4033b6aeb0c03fd8c1b9 [ 826.091046] env[62109]: INFO nova.compute.manager [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: 9a1c4327-64b3-4c4d-b6ae-77959084b405] Took 1.02 seconds to deallocate network for instance. [ 826.092819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 665cfe7aadea47c6a0217f1f03d451b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.122742] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 665cfe7aadea47c6a0217f1f03d451b0 [ 826.283782] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.284347] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 826.286020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg a9ed9e37385f41a0b46f23b720323ac9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.286992] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.571s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.288730] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 4be2914bb47f40f7b095a6dbbc7b5b5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.304405] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.306117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg ddd10e48a121452cb5af65ff5cd0ac69 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.324436] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4be2914bb47f40f7b095a6dbbc7b5b5c [ 826.327163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9ed9e37385f41a0b46f23b720323ac9 [ 826.340027] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddd10e48a121452cb5af65ff5cd0ac69 [ 826.597022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 1501a249985e4b589fc531f63165c6d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.629325] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1501a249985e4b589fc531f63165c6d7 [ 826.791304] env[62109]: DEBUG nova.compute.utils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.791942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 0366bb2480734c36a230aa8f1a143757 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 826.792880] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 826.804712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0366bb2480734c36a230aa8f1a143757 [ 826.828136] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.036376] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381b83e2-fc4e-47f8-8dc9-38b6c7e5142e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.040564] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7ed542-e2a7-4f48-8f8a-63fc60358dca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.070914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131587eb-d43d-4841-8b9e-46812d53c7f4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.078394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10aec47f-ac88-475e-a24f-e5b7f149bd1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.092973] env[62109]: DEBUG nova.compute.provider_tree [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.093851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 16d0ba78ce2f4aeebd3406399be90e09 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.104819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16d0ba78ce2f4aeebd3406399be90e09 [ 827.119519] env[62109]: INFO nova.scheduler.client.report [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Deleted allocations for instance 9a1c4327-64b3-4c4d-b6ae-77959084b405 [ 827.125258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 1fb33a88c58e4197989a60ae44bddd01 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.134478] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fb33a88c58e4197989a60ae44bddd01 [ 827.294050] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 
900e1e1e-5635-4782-bd87-046dd2af7dad] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 827.295713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg db66877d5ae74989963ad53bafd8966c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.325589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db66877d5ae74989963ad53bafd8966c [ 827.603519] env[62109]: DEBUG nova.scheduler.client.report [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.605987] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 21a2ec96afee4db0be5013f018e2b61d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.617587] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21a2ec96afee4db0be5013f018e2b61d [ 827.632028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-549cd6aa-84cb-440a-80fe-04e39bab5060 tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "9a1c4327-64b3-4c4d-b6ae-77959084b405" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 169.641s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.632028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 7cf9ff48ae1a4dba9d1a6a0245a6ac5f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.636513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cf9ff48ae1a4dba9d1a6a0245a6ac5f [ 827.800511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 15eb7b35125a4a509d020854ea072094 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 827.829001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15eb7b35125a4a509d020854ea072094 [ 828.108917] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.822s {{(pid=62109) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.109962] env[62109]: ERROR nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Traceback (most recent call last): [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.driver.spawn(context, instance, image_meta, [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] vm_ref = self.build_virtual_machine(instance, [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 828.109962] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] for vif in network_info: [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return self._sync_wrapper(fn, *args, **kwargs) [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.wait() [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self[:] = self._gt.wait() [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return self._exit_event.wait() [ 
828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] current.throw(*self._exc) [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 828.110362] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] result = function(*args, **kwargs) [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] return func(*args, **kwargs) [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise e [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] nwinfo = self.network_api.allocate_for_instance( [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] created_port_ids = self._update_ports_for_instance( [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] with excutils.save_and_reraise_exception(): [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] self.force_reraise() [ 828.110728] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise self.value [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] updated_port = self._update_port( [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] 
_ensure_no_port_binding_failure(port) [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] raise exception.PortBindingFailed(port_id=port['id']) [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] nova.exception.PortBindingFailed: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. [ 828.111083] env[62109]: ERROR nova.compute.manager [instance: a6ec5486-0843-4c38-b187-35d5296965a7] [ 828.111083] env[62109]: DEBUG nova.compute.utils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 828.111918] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Build of instance a6ec5486-0843-4c38-b187-35d5296965a7 was re-scheduled: Binding failed for port e8178202-8899-4251-9c65-7ab8b9217948, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 828.112334] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 828.112567] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquiring lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.112712] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Acquired lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.112867] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.113293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 2f486b4ad59247abbdb22a94eba66c85 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 828.114461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.686s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.116050] env[62109]: INFO nova.compute.claims [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.117549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 6b897fa7d2034048b5737c81b666be88 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 828.120178] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f486b4ad59247abbdb22a94eba66c85 [ 828.129537] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 828.131188] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 087a4c35dae04c28b4c979ba4a38d90a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 828.164084] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b897fa7d2034048b5737c81b666be88 [ 828.178310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 087a4c35dae04c28b4c979ba4a38d90a [ 828.303642] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 828.327886] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 828.328218] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 828.328377] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.328558] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 828.328700] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.328843] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 828.329080] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 828.329260] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 828.329429] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 
tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 828.329589] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 828.329756] env[62109]: DEBUG nova.virt.hardware [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.330586] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4dfdbf-27cc-439f-80e7-749d09c91777 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.338671] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b363fc5d-4a90-4cd0-802d-b2826e32f568 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.351351] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.356868] env[62109]: DEBUG oslo.service.loopingcall [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.357081] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 828.357272] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c2cf501-8fde-4ae3-b17e-9db92e456210 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.373460] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.373460] env[62109]: value = "task-401476" [ 828.373460] env[62109]: _type = "Task" [ 828.373460] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.383018] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.621156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 8555598607e946f1b14d5a1f99fdae5d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 828.629577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8555598607e946f1b14d5a1f99fdae5d [ 828.632843] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.648936] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.735290] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.735834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 5e0f9dfec8f84b2c89ff59de0bb5b92e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 828.744060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e0f9dfec8f84b2c89ff59de0bb5b92e [ 828.884463] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401476, 'name': CreateVM_Task, 'duration_secs': 0.238114} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.884710] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 828.885194] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.885368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.885669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.885912] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2bfe020-3cae-47ee-999d-b0c3679be1d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.890225] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 828.890225] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]521b940a-502e-76f4-96d1-cc3ff16a08ce" [ 828.890225] env[62109]: _type = "Task" [ 828.890225] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.897430] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]521b940a-502e-76f4-96d1-cc3ff16a08ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.237757] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Releasing lock "refresh_cache-a6ec5486-0843-4c38-b187-35d5296965a7" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.238070] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 829.238272] env[62109]: DEBUG nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 829.238441] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 829.253281] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.253814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 2e3af273f4714687915be006fc8b6558 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 829.260613] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e3af273f4714687915be006fc8b6558 [ 829.347130] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b3846e-4685-4807-baf8-f27ab6af54b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.355094] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa15f3e-7036-4b08-8a79-6ebbf23cf053 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.384942] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b324796-743e-4f08-9a48-cb0dd28b9c46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.395889] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750cf025-7db8-4f78-b588-8c820933a69a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.404477] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]521b940a-502e-76f4-96d1-cc3ff16a08ce, 'name': SearchDatastore_Task, 'duration_secs': 0.009619} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.413143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.413532] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.413898] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.414187] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.414489] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.415000] env[62109]: DEBUG nova.compute.provider_tree [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.415593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 8b94f17c79224ea98200e54b7800d984 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 829.416603] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92ad9dd1-8ed9-4b11-ab5e-c0f8fe98fb9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.423236] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b94f17c79224ea98200e54b7800d984 [ 829.425241] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.425546] env[62109]: DEBUG 
nova.virt.vmwareapi.vmops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 829.426296] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c159d0cf-bee3-470b-93a1-1ded0d5700c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.431275] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 829.431275] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]525c04bb-a04a-0892-09a7-98bbbbc13046" [ 829.431275] env[62109]: _type = "Task" [ 829.431275] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.438889] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]525c04bb-a04a-0892-09a7-98bbbbc13046, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.757272] env[62109]: DEBUG nova.network.neutron [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.757272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 623f296060ba4c70a9042de962842fa2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 829.764684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 623f296060ba4c70a9042de962842fa2 [ 829.920181] env[62109]: DEBUG nova.scheduler.client.report [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.922654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 734ab4ce1ff741b680759959dbf14b21 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 829.936045] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
734ab4ce1ff741b680759959dbf14b21 [ 829.942320] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]525c04bb-a04a-0892-09a7-98bbbbc13046, 'name': SearchDatastore_Task, 'duration_secs': 0.007691} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.943054] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c0cd260-bd94-470d-9ffd-e818b1a59afd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.947991] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 829.947991] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5259ab9e-8b50-f117-ba65-3809ec6c37c3" [ 829.947991] env[62109]: _type = "Task" [ 829.947991] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.955239] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5259ab9e-8b50-f117-ba65-3809ec6c37c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.259557] env[62109]: INFO nova.compute.manager [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] [instance: a6ec5486-0843-4c38-b187-35d5296965a7] Took 1.02 seconds to deallocate network for instance. [ 830.261410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg e9e07dee9ab94224a94b149aa1a4a3cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 830.292244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9e07dee9ab94224a94b149aa1a4a3cd [ 830.425569] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.426163] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.427862] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg ef48a99b49ca4c488a67e806c577b5d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 830.428879] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.849s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.431388] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg a7c9e98b7e5148859cb1957c3090333a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 830.458190] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5259ab9e-8b50-f117-ba65-3809ec6c37c3, 'name': SearchDatastore_Task, 'duration_secs': 0.008301} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.458757] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef48a99b49ca4c488a67e806c577b5d4 [ 830.459213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.459467] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 830.459724] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6b7840b-3b7c-458c-b670-26ed1814ccd8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.463095] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7c9e98b7e5148859cb1957c3090333a [ 830.467289] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 830.467289] env[62109]: value = "task-401477" [ 830.467289] env[62109]: _type = "Task" [ 830.467289] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.474580] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.766205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg c1c1838821a6476ba8600d63b798d90a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 830.802747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1c1838821a6476ba8600d63b798d90a [ 830.934499] env[62109]: DEBUG nova.compute.utils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.935159] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 4c5ebaecc987471fb5c16779338ce4c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 830.940167] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.940463] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.947278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c5ebaecc987471fb5c16779338ce4c4 [ 830.977457] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401477, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428302} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.977717] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 830.977976] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.980327] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e64def2-12f4-463a-954c-9a1128adc5e4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.983240] env[62109]: DEBUG nova.policy [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85bae9b15b0f43c0bc6e4d4f4c6a28e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4392957210d14f89af11bf7b1bf7ffc7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 830.989059] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 830.989059] env[62109]: value = "task-401478" [ 830.989059] env[62109]: _type = "Task" [ 830.989059] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.998334] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401478, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.167561] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f336640f-7ff8-4665-bd14-262c3b0d71f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.174419] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1768ec5-6b07-4615-8c84-316c9d9c1a90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.207739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f346677-b6ea-4807-b951-5a4aa8964269 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.219285] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07703d27-229e-4a2e-9e7c-6de5b490417f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.234513] env[62109]: DEBUG nova.compute.provider_tree [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.235043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 1a9c02f4f9c94b4bb5ad88f1aa699f93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.243276] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Successfully created port: 2be45127-dad8-4c66-98a1-3f9c554f3a2e {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.244980] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a9c02f4f9c94b4bb5ad88f1aa699f93 [ 831.291231] env[62109]: INFO nova.scheduler.client.report [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Deleted allocations for instance a6ec5486-0843-4c38-b187-35d5296965a7 [ 831.298342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Expecting reply to msg 45456c5ed1d746bf9e44876efd28e4a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.317247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45456c5ed1d746bf9e44876efd28e4a1 [ 831.440529] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 831.442497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 427ea57e9e1846a08aa97df863b47e97 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.473530] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 427ea57e9e1846a08aa97df863b47e97 [ 831.499352] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062145} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.499591] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.500353] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfaa574-e674-4c3a-ab5c-bcdc9683d9a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.520074] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.520328] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4da84ec0-7af1-439a-b4f5-c194aab26b20 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.540690] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 831.540690] env[62109]: value = "task-401479" [ 831.540690] env[62109]: _type = "Task" [ 831.540690] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.550718] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401479, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.550803] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Successfully created port: e2f44cd2-ed46-48b1-ac6c-813151fb254d {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.738260] env[62109]: DEBUG nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 831.741521] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 3f8021735d994db09b7f282a1aa8e898 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.756270] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f8021735d994db09b7f282a1aa8e898 [ 831.800430] env[62109]: DEBUG oslo_concurrency.lockutils [None req-da75bf2d-3b72-4d73-ab8c-b9ac523ffddf tempest-ListServerFiltersTestJSON-1171420207 tempest-ListServerFiltersTestJSON-1171420207-project-member] Lock "a6ec5486-0843-4c38-b187-35d5296965a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 173.484s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.801022] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 2a7fcce5b6514248b36f51ecb135aa43 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.810315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a7fcce5b6514248b36f51ecb135aa43 [ 831.829035] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Successfully created port: 7e2b1da9-5acc-4b7c-8f49-5f7d4a4b99e4 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.947355] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 3ddeb08691624be88396df2acd55c1eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 831.979297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ddeb08691624be88396df2acd55c1eb [ 832.059123] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 
tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401479, 'name': ReconfigVM_Task, 'duration_secs': 0.267115} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.059123] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.059123] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69fad4ef-e3a3-4aca-a923-ae3d76c0a110 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.063784] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 832.063784] env[62109]: value = "task-401480" [ 832.063784] env[62109]: _type = "Task" [ 832.063784] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.071826] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401480, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.245477] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.815s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.245477] env[62109]: ERROR nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. 
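The PortBindingFailed just logged for instance c7ec619c-1b00-4d58-a593-671c0139c4e3 (full traceback follows) is the same failure mode already seen above for a6ec5486-0843-4c38-b187-35d5296965a7: Neutron could not bind the port on this host, so it reports the port with binding:vif_type set to 'binding_failed', and Nova's _ensure_no_port_binding_failure turns that into PortBindingFailed, which re-schedules the build. A minimal, self-contained sketch of that check, with simplified stand-ins for Nova's exception class and constant (not the exact Nova code):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it failed to bind with
        # binding:vif_type = 'binding_failed'; raising here is what
        # produces the tracebacks and the re-schedule seen in this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port id from the traceback below:
    # ensure_no_port_binding_failure(
    #     {'id': 'bf7efc6a-fe14-4763-9bd8-3116fd5d40e2',
    #      'binding:vif_type': 'binding_failed'})
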
[ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Traceback (most recent call last): [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.driver.spawn(context, instance, image_meta, [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 832.245477] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] vm_ref = self.build_virtual_machine(instance, [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] for vif in network_info: [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self._sync_wrapper(fn, *args, **kwargs) [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.wait() [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self[:] = self._gt.wait() [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self._exit_event.wait() [ 832.245831] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] result = hub.switch() [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return self.greenlet.switch() [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] result = function(*args, **kwargs) [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] return func(*args, **kwargs) [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise e [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] nwinfo = self.network_api.allocate_for_instance( [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 832.246190] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] created_port_ids = self._update_ports_for_instance( [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] with excutils.save_and_reraise_exception(): [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] self.force_reraise() [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise self.value [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] updated_port = self._update_port( [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] _ensure_no_port_binding_failure(port) [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 832.246582] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] raise exception.PortBindingFailed(port_id=port['id']) [ 832.246911] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] nova.exception.PortBindingFailed: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. [ 832.246911] env[62109]: ERROR nova.compute.manager [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] [ 832.246911] env[62109]: DEBUG nova.compute.utils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 832.246911] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.278s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.248330] env[62109]: INFO nova.compute.claims [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.250002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg f069fb5b4f7d47a5a3a4c556581f9b16 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.251110] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Build of instance c7ec619c-1b00-4d58-a593-671c0139c4e3 was re-scheduled: Binding failed for port bf7efc6a-fe14-4763-9bd8-3116fd5d40e2, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 832.251526] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 832.251731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquiring lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.251865] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Acquired lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.252029] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 832.252370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg e2c3295e213749cd9b58a41dbe75fbef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.260374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2c3295e213749cd9b58a41dbe75fbef [ 832.284240] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f069fb5b4f7d47a5a3a4c556581f9b16 [ 832.308388] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.310227] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 948e5bde05d7457784aa2094dff13d7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.342145] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 948e5bde05d7457784aa2094dff13d7b [ 832.452287] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.478542] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.478815] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.478937] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.479147] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.479295] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.479438] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.479647] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.479960] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.479960] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 
tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.480123] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.480298] env[62109]: DEBUG nova.virt.hardware [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.481150] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcd3452-c7b5-4423-aba0-9659297111df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.489815] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3aecf55-c954-47a9-9af7-a6d567b97199 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.573995] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401480, 'name': Rename_Task, 'duration_secs': 0.127374} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.574276] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 832.574522] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8df66815-0151-4d46-a9a2-5b49a205fc9f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.581115] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 832.581115] env[62109]: value = "task-401481" [ 832.581115] env[62109]: _type = "Task" [ 832.581115] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.588777] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.755458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 49e7c27e94a3460e8074a1b30c0cf729 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.764680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49e7c27e94a3460e8074a1b30c0cf729 [ 832.778406] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.832603] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.885687] env[62109]: DEBUG nova.compute.manager [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Received event network-changed-2be45127-dad8-4c66-98a1-3f9c554f3a2e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 832.885913] env[62109]: DEBUG nova.compute.manager [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Refreshing instance network info cache due to event network-changed-2be45127-dad8-4c66-98a1-3f9c554f3a2e. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 832.886880] env[62109]: DEBUG oslo_concurrency.lockutils [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] Acquiring lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.886880] env[62109]: DEBUG oslo_concurrency.lockutils [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] Acquired lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.886880] env[62109]: DEBUG nova.network.neutron [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Refreshing network info cache for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 832.886880] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] Expecting reply to msg 7bfcd02ae2564fb4bd36d29ff08a0680 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.895224] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bfcd02ae2564fb4bd36d29ff08a0680 [ 832.915179] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.915716] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 313cbebc2b884ac5847a8ed4c56b2471 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 832.925251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 313cbebc2b884ac5847a8ed4c56b2471 [ 833.060327] env[62109]: ERROR nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. 
[ 833.060327] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.060327] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.060327] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.060327] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.060327] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.060327] env[62109]: ERROR nova.compute.manager raise self.value [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.060327] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 833.060327] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.060327] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 833.060790] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.060790] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 833.060790] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. 
[ 833.060790] env[62109]: ERROR nova.compute.manager [ 833.060790] env[62109]: Traceback (most recent call last): [ 833.060790] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 833.060790] env[62109]: listener.cb(fileno) [ 833.060790] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.060790] env[62109]: result = function(*args, **kwargs) [ 833.060790] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 833.060790] env[62109]: return func(*args, **kwargs) [ 833.060790] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 833.060790] env[62109]: raise e [ 833.060790] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.060790] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 833.060790] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.060790] env[62109]: created_port_ids = self._update_ports_for_instance( [ 833.060790] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.060790] env[62109]: with excutils.save_and_reraise_exception(): [ 833.060790] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.060790] env[62109]: self.force_reraise() [ 833.060790] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.060790] env[62109]: raise self.value [ 833.060790] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.060790] env[62109]: updated_port = self._update_port( [ 833.060790] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.060790] env[62109]: _ensure_no_port_binding_failure(port) [ 833.060790] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.060790] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 833.061505] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. [ 833.061505] env[62109]: Removing descriptor: 19 [ 833.061505] env[62109]: ERROR nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. 
[ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Traceback (most recent call last): [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] yield resources [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.driver.spawn(context, instance, image_meta, [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.061505] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] vm_ref = self.build_virtual_machine(instance, [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] for vif in network_info: [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self._sync_wrapper(fn, *args, **kwargs) [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.wait() [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self[:] = self._gt.wait() [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self._exit_event.wait() [ 833.061801] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 833.062110] env[62109]: ERROR 
nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] result = hub.switch() [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self.greenlet.switch() [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] result = function(*args, **kwargs) [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return func(*args, **kwargs) [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise e [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] nwinfo = self.network_api.allocate_for_instance( [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.062110] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] created_port_ids = self._update_ports_for_instance( [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] with excutils.save_and_reraise_exception(): [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.force_reraise() [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise self.value [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] updated_port = self._update_port( [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.062468] 
env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] _ensure_no_port_binding_failure(port) [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.062468] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise exception.PortBindingFailed(port_id=port['id']) [ 833.062735] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. [ 833.062735] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] [ 833.062735] env[62109]: INFO nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Terminating instance [ 833.063477] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.090710] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401481, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.417340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Releasing lock "refresh_cache-c7ec619c-1b00-4d58-a593-671c0139c4e3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.417599] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 833.417760] env[62109]: DEBUG nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 833.417923] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.428374] env[62109]: DEBUG nova.network.neutron [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.434586] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.435170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 4594c0585bdf4907adbd9408364f46cf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 833.442874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4594c0585bdf4907adbd9408364f46cf [ 833.546161] env[62109]: DEBUG nova.network.neutron [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.547182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] Expecting reply to msg abcfae2586404ae19a63f4824cf0831c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 833.556391] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbb523d-87b1-424f-9fa3-d0bd641a65de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.560637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abcfae2586404ae19a63f4824cf0831c [ 833.565617] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda80864-2bcf-4e74-9c8b-33e4773b1efd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.598355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6b2bbc-f457-444f-8e4c-4b1439fee74e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.606214] env[62109]: DEBUG oslo_vmware.api [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401481, 'name': PowerOnVM_Task, 'duration_secs': 0.638039} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.608316] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 833.608664] env[62109]: INFO nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Took 5.30 seconds to spawn the instance on the hypervisor. 
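The Rename_Task / PowerOnVM_Task entries above come from oslo.vmware's task poller, which keeps re-reading a vCenter task's state ("progress is 0%", "progress is 66%") until it reaches a terminal state and then reports the duration. A minimal sketch of that polling pattern, using a hypothetical get_task_info() callable as a stand-in for the real vSphere session API (names and return shape are assumptions, not the oslo.vmware implementation):

    import time

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state (sketch only)."""

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
        # Poll the task until it leaves the queued/running states, mirroring
        # the "progress is N%" ... "completed successfully" lines above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)      # assumed to return a dict
            state = info.get("state")          # e.g. queued/running/success/error
            if state == "success":
                return info                    # caller can read duration_secs etc.
            if state == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not finish within {timeout}s")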
[ 833.608991] env[62109]: DEBUG nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 833.610203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a5d7c6-4f15-4cf4-a5d7-093b3db03187 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.613836] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a35c605-d494-4e86-a552-cdc6a1765697 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.622546] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 4a655c9bfe314fe2842ab8d2d15ea7b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 833.632322] env[62109]: DEBUG nova.compute.provider_tree [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.633129] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 848caac0c9bb48acae4a4a125d5a2445 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 833.640591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 848caac0c9bb48acae4a4a125d5a2445 [ 833.656737] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a655c9bfe314fe2842ab8d2d15ea7b8 [ 833.938002] env[62109]: DEBUG nova.network.neutron [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: c7ec619c-1b00-4d58-a593-671c0139c4e3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.938683] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg ced6a81b867d49e5b2d6eafc9a7276f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 833.953147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced6a81b867d49e5b2d6eafc9a7276f7 [ 834.050304] env[62109]: DEBUG oslo_concurrency.lockutils [req-722cf135-b405-496b-9b0b-3527dfe29d16 req-7c9d4bba-4f13-47f7-ab3f-13695a2c3c98 service nova] Releasing lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.050731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquired lock 
"refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.050910] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 834.051448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 8a2e321f7c1746419189f5f6e0281baa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.058752] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a2e321f7c1746419189f5f6e0281baa [ 834.131283] env[62109]: INFO nova.compute.manager [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Took 34.06 seconds to build instance. [ 834.131850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 777414b27a4d4d57b3d58668bc968ba9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.136310] env[62109]: DEBUG nova.scheduler.client.report [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 834.138556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 828c5095b0e447f7b8779a930ba51b9a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.144534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 777414b27a4d4d57b3d58668bc968ba9 [ 834.151593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 828c5095b0e447f7b8779a930ba51b9a [ 834.361399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg e32f170ca1f14961a71316920d73c171 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.371982] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e32f170ca1f14961a71316920d73c171 [ 834.453925] env[62109]: INFO nova.compute.manager [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] [instance: 
c7ec619c-1b00-4d58-a593-671c0139c4e3] Took 1.04 seconds to deallocate network for instance. [ 834.455858] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 3dc97e54923e46058a1cee75fc907fdb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.496885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dc97e54923e46058a1cee75fc907fdb [ 834.585032] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.633683] env[62109]: DEBUG oslo_concurrency.lockutils [None req-894882f9-f909-4f49-a03f-0e4f67e2b6c5 tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.016s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.634191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 17db5e7361a742f9a6c9e659db0c0955 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.641023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.641508] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 834.643259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg a81ef2009fe844e0af9e8f84afe4492a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.644524] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.823s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.645535] env[62109]: INFO nova.compute.claims [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.646966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6a022e18fa014acdb1095c95e3969133 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.648591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17db5e7361a742f9a6c9e659db0c0955 [ 834.679412] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a81ef2009fe844e0af9e8f84afe4492a [ 834.680017] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a022e18fa014acdb1095c95e3969133 [ 834.686295] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.686792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 92a8cb61f7fa4ea6b9fecfe43d0cf6c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 834.693329] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92a8cb61f7fa4ea6b9fecfe43d0cf6c6 [ 834.864170] env[62109]: INFO nova.compute.manager [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Rebuilding instance [ 834.914792] env[62109]: DEBUG nova.compute.manager [req-9abc276d-f0a7-4649-a1f2-8141663c1fd1 req-758bad10-5a60-46db-b60c-6e8d13968684 service nova] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Received event network-vif-deleted-2be45127-dad8-4c66-98a1-3f9c554f3a2e {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 834.962871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg 
a5de3b8af0544b7dbf1d0c478bf1198e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.006564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5de3b8af0544b7dbf1d0c478bf1198e [ 835.057844] env[62109]: DEBUG nova.compute.manager [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 835.058647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ead4a7-5a63-430a-8f5a-41c5fc585d46 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.066741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 4b36d46810d14751b24f20d1a097ae0d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.113422] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b36d46810d14751b24f20d1a097ae0d [ 835.135981] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.138085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ac3f956424a94ec59a8c01efd8b3a7b2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.149623] env[62109]: DEBUG nova.compute.utils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.150233] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg a1db9be072b0443a93855333b96997fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.152087] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg e72c3737cba9448a9ca80fcd88e63bf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.152804] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 835.152966] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 835.159787] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e72c3737cba9448a9ca80fcd88e63bf7 [ 835.160697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1db9be072b0443a93855333b96997fa [ 835.182428] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac3f956424a94ec59a8c01efd8b3a7b2 [ 835.188865] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Releasing lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.189386] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 835.189603] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 835.189912] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-515be6ac-a68b-4ef4-a135-c3ce7502199d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.193646] env[62109]: DEBUG nova.policy [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aecef81773a14855b22e49bfdee52c23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '648690c514264bdba16759f1dce64e4b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 835.201723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743f57c0-2442-4a11-80e9-6ddd90967ccc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.226367] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
afc5587e-7fd5-4b07-aff8-98ef8358985f could not be found. [ 835.226595] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 835.226774] env[62109]: INFO nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 835.227017] env[62109]: DEBUG oslo.service.loopingcall [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.227233] env[62109]: DEBUG nova.compute.manager [-] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 835.227321] env[62109]: DEBUG nova.network.neutron [-] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.332341] env[62109]: DEBUG nova.network.neutron [-] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.496347] env[62109]: INFO nova.scheduler.client.report [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Deleted allocations for instance c7ec619c-1b00-4d58-a593-671c0139c4e3 [ 835.504606] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Expecting reply to msg becc214228d54ce5afceb8387767a115 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.522475] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg becc214228d54ce5afceb8387767a115 [ 835.525097] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Successfully created port: ae7e19e9-1d82-47c1-96f4-06020432387b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.573384] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 835.573743] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a098758d-bf53-4f3b-bf7b-6e308b2a1916 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.581331] env[62109]: DEBUG oslo_vmware.api 
[None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 835.581331] env[62109]: value = "task-401482" [ 835.581331] env[62109]: _type = "Task" [ 835.581331] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.590394] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.654638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.658911] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 835.661499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 6a1581a6243d499ab615b66953910523 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.703667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 75e3ad048c0c46789161949362669d93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.712293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75e3ad048c0c46789161949362669d93 [ 835.712888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a1581a6243d499ab615b66953910523 [ 835.898632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2c7c33-0f74-483f-be13-746924857e84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.906021] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f607d250-69d8-43b4-84b2-e2cd9e2e8739 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.934907] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca75b1c-8c00-4092-8302-c6359e1f74a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.941664] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3899c0e4-70f9-4955-902e-eb2d1b583bfd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.954158] env[62109]: DEBUG nova.compute.provider_tree [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 
tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.954635] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 49e71729424b470b89f32bbb46ceadd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 835.962625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49e71729424b470b89f32bbb46ceadd3 [ 836.006206] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c806be6-d3fa-4acb-a2ea-b681958510ea tempest-ServersTestJSON-1195313588 tempest-ServersTestJSON-1195313588-project-member] Lock "c7ec619c-1b00-4d58-a593-671c0139c4e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 172.782s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.006797] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 64d2335c3f0445e084945e7e8b80871f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.016103] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d2335c3f0445e084945e7e8b80871f [ 836.091201] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401482, 'name': PowerOffVM_Task, 'duration_secs': 0.184517} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.091459] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 836.091666] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 836.092414] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6505c2aa-9130-41b8-baf5-417f86c9ed7b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.100157] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 836.100379] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d16f33ad-505c-4f4b-b10d-df862c26215e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.124394] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 836.124394] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 836.124394] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleting the datastore file [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.124394] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2580f62-a9a2-4989-99f1-f95d9167e0ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.130841] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 836.130841] env[62109]: value = "task-401484" [ 836.130841] env[62109]: _type = "Task" [ 836.130841] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.138978] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.170289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 1a14c04b60b9447e89333d1c82835af0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.205599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a14c04b60b9447e89333d1c82835af0 [ 836.206162] env[62109]: DEBUG nova.network.neutron [-] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.207359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 51a5e83a1eb641c68065f7dcbd0bb50c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.217020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51a5e83a1eb641c68065f7dcbd0bb50c [ 836.457575] env[62109]: DEBUG nova.scheduler.client.report [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.460404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 18ec99c5ab3f4bd084d219067043d757 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.472228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18ec99c5ab3f4bd084d219067043d757 [ 836.512207] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.514410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 8a2e5be9ef07450e9d06813101ec0383 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.545666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a2e5be9ef07450e9d06813101ec0383 [ 836.548908] env[62109]: ERROR nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. [ 836.548908] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.548908] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 836.548908] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 836.548908] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.548908] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.548908] env[62109]: ERROR nova.compute.manager raise self.value [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 836.548908] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 836.548908] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.548908] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 836.549382] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.549382] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 836.549382] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. 
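The traceback above bottoms out in _ensure_no_port_binding_failure, which is where a failed Neutron binding becomes the PortBindingFailed exception reported for port ae7e19e9-1d82-47c1-96f4-06020432387b. As a reading aid only, here is a minimal Python sketch of that style of check, assuming a Neutron port dict exposing 'id' and 'binding:vif_type' keys; the names are simplified stand-ins, not the literal Nova source:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron signals a failed binding by leaving the port's vif_type
    # set to the special value "binding_failed".
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

# The state the log reports for the failing port:
port = {"id": "ae7e19e9-1d82-47c1-96f4-06020432387b",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

Because the port stays in that state, the spawn of instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 hits the same exception again when it iterates the network_info, which is what the second traceback further down shows.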
[ 836.549382] env[62109]: ERROR nova.compute.manager [ 836.549382] env[62109]: Traceback (most recent call last): [ 836.549382] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 836.549382] env[62109]: listener.cb(fileno) [ 836.549382] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.549382] env[62109]: result = function(*args, **kwargs) [ 836.549382] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 836.549382] env[62109]: return func(*args, **kwargs) [ 836.549382] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 836.549382] env[62109]: raise e [ 836.549382] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.549382] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 836.549382] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 836.549382] env[62109]: created_port_ids = self._update_ports_for_instance( [ 836.549382] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 836.549382] env[62109]: with excutils.save_and_reraise_exception(): [ 836.549382] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.549382] env[62109]: self.force_reraise() [ 836.549382] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.549382] env[62109]: raise self.value [ 836.549382] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 836.549382] env[62109]: updated_port = self._update_port( [ 836.549382] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.549382] env[62109]: _ensure_no_port_binding_failure(port) [ 836.549382] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.549382] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 836.550195] env[62109]: nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. [ 836.550195] env[62109]: Removing descriptor: 19 [ 836.640743] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083925} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.640994] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.641203] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 836.641381] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 836.642918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 70347528bd014719861e7f7fdc11a806 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.670565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70347528bd014719861e7f7fdc11a806 [ 836.673846] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 836.697822] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.697822] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.697822] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.698029] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.698029] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.698266] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.698354] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.698494] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.699151] env[62109]: DEBUG nova.virt.hardware [None 
req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.699151] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.699151] env[62109]: DEBUG nova.virt.hardware [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.699817] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380d5790-3c04-494f-86c5-b499e3d3e8b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.707798] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa8e70e-6fa0-42a4-a1e2-49225a5fba16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.711807] env[62109]: INFO nova.compute.manager [-] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Took 1.48 seconds to deallocate network for instance. [ 836.714300] env[62109]: DEBUG nova.compute.claims [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 836.714482] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.724025] env[62109]: ERROR nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. 
[ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Traceback (most recent call last): [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] yield resources [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.driver.spawn(context, instance, image_meta, [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] vm_ref = self.build_virtual_machine(instance, [ 836.724025] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] for vif in network_info: [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return self._sync_wrapper(fn, *args, **kwargs) [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.wait() [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self[:] = self._gt.wait() [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return self._exit_event.wait() [ 836.724384] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 836.724384] env[62109]: ERROR 
nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] current.throw(*self._exc) [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] result = function(*args, **kwargs) [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return func(*args, **kwargs) [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise e [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] nwinfo = self.network_api.allocate_for_instance( [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] created_port_ids = self._update_ports_for_instance( [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] with excutils.save_and_reraise_exception(): [ 836.724739] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.force_reraise() [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise self.value [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] updated_port = self._update_port( [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] _ensure_no_port_binding_failure(port) [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise exception.PortBindingFailed(port_id=port['id']) [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. [ 836.725097] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] [ 836.725097] env[62109]: INFO nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Terminating instance [ 836.726189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquiring lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.726346] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquired lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.726507] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 836.726917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 3b87449439e94c7a8c9b26e4417885ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.733358] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b87449439e94c7a8c9b26e4417885ba [ 836.962932] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.963427] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 836.965214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg d66be00d91554c1d8b0c5f557b900b89 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 836.966274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.190s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.974210] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg b5a9fb02b6f74381935c3457d4d61f31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.004016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d66be00d91554c1d8b0c5f557b900b89 [ 837.006703] env[62109]: DEBUG nova.compute.manager [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Received event network-changed-ae7e19e9-1d82-47c1-96f4-06020432387b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 837.006908] env[62109]: DEBUG nova.compute.manager [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Refreshing instance network info cache due to event network-changed-ae7e19e9-1d82-47c1-96f4-06020432387b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 837.007096] env[62109]: DEBUG oslo_concurrency.lockutils [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] Acquiring lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.016165] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5a9fb02b6f74381935c3457d4d61f31 [ 837.032249] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.147637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 93faf0ea25904344b3a999ba32b193ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.178720] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93faf0ea25904344b3a999ba32b193ec [ 837.314119] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.441790] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.442358] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg a328e28e4adb490e804220fbb29a26ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.454547] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a328e28e4adb490e804220fbb29a26ce [ 837.482236] env[62109]: DEBUG nova.compute.utils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.482863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg b5dd21ef374943deb000c03187095bf7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.484031] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 837.484130] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 837.494465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5dd21ef374943deb000c03187095bf7 [ 837.533594] env[62109]: DEBUG nova.policy [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6349e1aff7d945a6a471b1f4e826b23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a866168186462d9d849072a1ff25f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 837.675620] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.675868] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.676029] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.676219] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.676362] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 837.676504] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.676706] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.676858] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.677019] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.677176] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.677344] env[62109]: DEBUG nova.virt.hardware [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.679648] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb08213-e0ef-4230-acd1-305cd5942b42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.696286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae81b34-737d-4612-85e7-974f68152867 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.710022] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.715628] env[62109]: DEBUG oslo.service.loopingcall [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.720131] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 837.720527] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf7919bb-8c46-43e2-bd47-b2297160e8eb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.741118] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.741118] env[62109]: value = "task-401485" [ 837.741118] env[62109]: _type = "Task" [ 837.741118] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.745787] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4576e167-e4b8-4538-abec-c31d5c7598b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.752455] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401485, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.754728] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ed54a2-1e49-4b14-8c1d-29425dcd3d78 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.787659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17bd0e0-a00b-44ea-a997-3dfc0f8fd430 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.794655] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1aab2a1-e966-4036-abb8-7f370481f71f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.809646] env[62109]: DEBUG nova.compute.provider_tree [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.810166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg fd9265de8706475aa017a4f084e98f14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.817100] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd9265de8706475aa017a4f084e98f14 [ 837.859708] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Successfully created port: f608a2e6-985c-4133-bb07-9b62be09d8d9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.945518] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 
tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Releasing lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.946106] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 837.946653] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.946763] env[62109]: DEBUG oslo_concurrency.lockutils [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] Acquired lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.946917] env[62109]: DEBUG nova.network.neutron [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Refreshing network info cache for port ae7e19e9-1d82-47c1-96f4-06020432387b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 837.947330] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] Expecting reply to msg 8439ec6daee0495e86539cc62b09828b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 837.948166] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd337dcc-1320-4aaf-a5b4-5bd0693d426a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.957988] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdc1cc2-89ef-45ed-ac1b-cd0d459b5a94 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.969175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8439ec6daee0495e86539cc62b09828b [ 837.983766] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 could not be found. 
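The WARNING above is the destroy path tolerating a VM that is already gone from the backend: vmops catches InstanceNotFound and carries on as if the destroy succeeded, which is why the next records report "Instance destroyed" and a 0.04 second teardown before network deallocation starts. A rough sketch of that pattern, with hypothetical find_vm/unregister_vm helpers standing in for the SearchIndex lookup and UnregisterVM steps visible in the surrounding records (an illustration under those assumptions, not the Nova source):

class InstanceNotFound(Exception):
    pass

def destroy_on_backend(instance_uuid, find_vm, unregister_vm, log):
    # find_vm and unregister_vm are stand-ins for the backend calls.
    try:
        vm_ref = find_vm(instance_uuid)
    except InstanceNotFound as exc:
        # Nothing left on the hypervisor; warn and treat it as destroyed.
        log("WARNING: Instance does not exist on backend: %s" % exc)
        return
    unregister_vm(vm_ref)

def missing_vm(uuid):
    raise InstanceNotFound("Instance %s could not be found." % uuid)

destroy_on_backend("47b83dbe-d7d8-4875-bb79-95a8fecf4028",
                   find_vm=missing_vm,
                   unregister_vm=lambda ref: None,
                   log=print)

With the hypervisor side treated as done, the compute manager proceeds to deallocate networking, which matches the "Deallocating network for instance" and cache-update records that follow.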
[ 837.983974] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 837.984162] env[62109]: INFO nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Took 0.04 seconds to destroy the instance on the hypervisor. [ 837.984397] env[62109]: DEBUG oslo.service.loopingcall [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.984642] env[62109]: DEBUG nova.compute.manager [-] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.984734] env[62109]: DEBUG nova.network.neutron [-] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 837.986793] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 837.988494] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 8a7da379eab34063b364acbf576a2b81 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.006274] env[62109]: DEBUG nova.network.neutron [-] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.006777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b801ffc093f64e22a5930484dec2d4f6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.014523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b801ffc093f64e22a5930484dec2d4f6 [ 838.021569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a7da379eab34063b364acbf576a2b81 [ 838.250996] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401485, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.312645] env[62109]: DEBUG nova.scheduler.client.report [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 838.315296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg a086a8b3d36941cf90a9707819efeb3b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.329344] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a086a8b3d36941cf90a9707819efeb3b [ 838.473111] env[62109]: DEBUG nova.network.neutron [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.493424] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 100c4f8e3d49400dba5f47de7b20f282 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.508388] env[62109]: DEBUG nova.network.neutron [-] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.508908] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 884beb0e94c84822a7f6f72031fbf40e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.516930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 884beb0e94c84822a7f6f72031fbf40e [ 838.530120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 100c4f8e3d49400dba5f47de7b20f282 [ 838.551049] env[62109]: DEBUG nova.network.neutron [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.551550] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] Expecting reply to msg 9bb0d706109b49f0b08fc618eeb83ccf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.558679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bb0d706109b49f0b08fc618eeb83ccf [ 838.752300] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401485, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.818423] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.852s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.819088] env[62109]: ERROR nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Traceback (most recent call last): [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.driver.spawn(context, instance, image_meta, [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] vm_ref = self.build_virtual_machine(instance, [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.819088] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] for vif in network_info: [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self._sync_wrapper(fn, *args, **kwargs) [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.wait() [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 838.819434] 
env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self[:] = self._gt.wait() [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self._exit_event.wait() [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] result = hub.switch() [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 838.819434] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return self.greenlet.switch() [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] result = function(*args, **kwargs) [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] return func(*args, **kwargs) [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise e [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] nwinfo = self.network_api.allocate_for_instance( [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] created_port_ids = self._update_ports_for_instance( [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] with excutils.save_and_reraise_exception(): [ 838.819816] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] self.force_reraise() [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise self.value [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] updated_port = self._update_port( [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] _ensure_no_port_binding_failure(port) [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] raise exception.PortBindingFailed(port_id=port['id']) [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] nova.exception.PortBindingFailed: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. [ 838.820117] env[62109]: ERROR nova.compute.manager [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] [ 838.820393] env[62109]: DEBUG nova.compute.utils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 838.821900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.624s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.823333] env[62109]: INFO nova.compute.claims [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.824897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 24db7eee1a8b42b5bc9cde2d04a7c12f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 838.826054] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Build of instance a04d014e-bed6-4e4b-a5eb-316d88c174f0 was re-scheduled: Binding failed for port bb0cdfe1-f3b4-46b1-a6e1-1170d0541113, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 838.826494] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 838.826720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquiring lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.826863] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Acquired lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.827017] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.827375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg cfc2b69badb7407a8a22efbccf812786 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 838.833283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfc2b69badb7407a8a22efbccf812786 [ 838.838162] env[62109]: ERROR nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. [ 838.838162] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.838162] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.838162] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.838162] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.838162] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.838162] env[62109]: ERROR nova.compute.manager raise self.value [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.838162] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 838.838162] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.838162] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 838.838568] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.838568] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 838.838568] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. 
[ 838.838568] env[62109]: ERROR nova.compute.manager [ 838.838568] env[62109]: Traceback (most recent call last): [ 838.838568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 838.838568] env[62109]: listener.cb(fileno) [ 838.838568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.838568] env[62109]: result = function(*args, **kwargs) [ 838.838568] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.838568] env[62109]: return func(*args, **kwargs) [ 838.838568] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 838.838568] env[62109]: raise e [ 838.838568] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.838568] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 838.838568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.838568] env[62109]: created_port_ids = self._update_ports_for_instance( [ 838.838568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.838568] env[62109]: with excutils.save_and_reraise_exception(): [ 838.838568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.838568] env[62109]: self.force_reraise() [ 838.838568] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.838568] env[62109]: raise self.value [ 838.838568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.838568] env[62109]: updated_port = self._update_port( [ 838.838568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.838568] env[62109]: _ensure_no_port_binding_failure(port) [ 838.838568] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.838568] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 838.839329] env[62109]: nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. [ 838.839329] env[62109]: Removing descriptor: 19 [ 838.864633] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24db7eee1a8b42b5bc9cde2d04a7c12f [ 838.996356] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 839.013023] env[62109]: INFO nova.compute.manager [-] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Took 1.03 seconds to deallocate network for instance. 
[ 839.015321] env[62109]: DEBUG nova.compute.claims [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 839.015504] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.023166] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.023396] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.023551] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.023727] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.023870] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.024030] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.024246] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e 
tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 839.024415] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.024640] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.024813] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.024986] env[62109]: DEBUG nova.virt.hardware [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.025826] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3414265e-3e7d-4faa-aeba-d4be9855ecc5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.034004] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9e9927-ebd1-49fb-8211-12fde74d6237 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.049694] env[62109]: ERROR nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. 
[ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Traceback (most recent call last): [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] yield resources [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.driver.spawn(context, instance, image_meta, [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] vm_ref = self.build_virtual_machine(instance, [ 839.049694] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] vif_infos = vmwarevif.get_vif_info(self._session, [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] for vif in network_info: [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return self._sync_wrapper(fn, *args, **kwargs) [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.wait() [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self[:] = self._gt.wait() [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return self._exit_event.wait() [ 839.050051] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 839.050051] env[62109]: ERROR 
nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] current.throw(*self._exc) [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] result = function(*args, **kwargs) [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return func(*args, **kwargs) [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise e [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] nwinfo = self.network_api.allocate_for_instance( [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] created_port_ids = self._update_ports_for_instance( [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] with excutils.save_and_reraise_exception(): [ 839.050612] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.force_reraise() [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise self.value [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] updated_port = self._update_port( [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] _ensure_no_port_binding_failure(port) [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise exception.PortBindingFailed(port_id=port['id']) [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. [ 839.050997] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] [ 839.051250] env[62109]: INFO nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Terminating instance [ 839.053038] env[62109]: DEBUG oslo_concurrency.lockutils [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] Releasing lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.053263] env[62109]: DEBUG nova.compute.manager [req-3d7b961e-622a-4c67-9c47-88a9436dfcb9 req-8cac146c-ea94-47e8-aae1-b86db4943eda service nova] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Received event network-vif-deleted-ae7e19e9-1d82-47c1-96f4-06020432387b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 839.054613] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.054770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.054933] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.055340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 7a0a828cbcc84c56a3e734c9066f6237 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 839.056947] env[62109]: DEBUG nova.compute.manager [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Received event network-changed-f608a2e6-985c-4133-bb07-9b62be09d8d9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 839.056947] env[62109]: DEBUG nova.compute.manager [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Refreshing instance network info cache due to event 
network-changed-f608a2e6-985c-4133-bb07-9b62be09d8d9. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 839.057174] env[62109]: DEBUG oslo_concurrency.lockutils [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] Acquiring lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.063024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a0a828cbcc84c56a3e734c9066f6237 [ 839.252810] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401485, 'name': CreateVM_Task, 'duration_secs': 1.279203} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.252978] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 839.253382] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.253532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.253854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.254103] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d648270d-0a34-46dd-8b01-62b77f056b8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.258579] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 839.258579] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5261ffb5-37aa-5faa-900f-596e742d3caa" [ 839.258579] env[62109]: _type = "Task" [ 839.258579] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.266106] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5261ffb5-37aa-5faa-900f-596e742d3caa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.330489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 614a0b10db47441caeb6bfe2b8619bc7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 839.338602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 614a0b10db47441caeb6bfe2b8619bc7 [ 839.347631] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.436560] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.437098] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg abcb7ec3addf473f9f905c396b5b8995 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 839.445458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abcb7ec3addf473f9f905c396b5b8995 [ 839.572982] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.639342] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.639860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 85ece0fa9bed476b8fe0f537751eff20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 839.648503] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85ece0fa9bed476b8fe0f537751eff20 [ 839.768495] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5261ffb5-37aa-5faa-900f-596e742d3caa, 'name': SearchDatastore_Task, 'duration_secs': 0.010042} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.768646] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.769522] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.769522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.769522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.769522] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.769777] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bd1be63-a34e-4aaf-ba44-ecd6ec0486ea {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.777487] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.777656] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 839.778683] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48a4d7f0-28d5-4d76-a0a2-436c8c01283f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.790681] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 839.790681] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52b5839b-3aaa-eb29-cd55-ea4d2392fa28" [ 839.790681] env[62109]: _type = "Task" [ 839.790681] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.801091] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52b5839b-3aaa-eb29-cd55-ea4d2392fa28, 'name': SearchDatastore_Task, 'duration_secs': 0.008501} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.801827] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eda73a7f-922b-4b1d-86a5-2613fa934f61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.806460] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 839.806460] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52c2e0c1-f177-c7b8-3222-0c8a82d53cfb" [ 839.806460] env[62109]: _type = "Task" [ 839.806460] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.814182] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52c2e0c1-f177-c7b8-3222-0c8a82d53cfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.939286] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Releasing lock "refresh_cache-a04d014e-bed6-4e4b-a5eb-316d88c174f0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.939390] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 839.939575] env[62109]: DEBUG nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 839.939741] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.972898] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.973512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 8ec739b5e57a4c3493796ac86a7466b7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 839.985654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ec739b5e57a4c3493796ac86a7466b7 [ 840.102714] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a983c484-df4e-40e5-941c-9bfa9a119abe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.110261] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264a3695-0a4f-40b5-a977-39bb3be3eb01 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.141548] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbd1f4d-ea4e-4434-a6f5-1fdc2f867edc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.144999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.145394] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 840.145769] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 840.146102] env[62109]: DEBUG oslo_concurrency.lockutils [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] Acquired lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.146279] env[62109]: DEBUG nova.network.neutron [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Refreshing network info cache for port f608a2e6-985c-4133-bb07-9b62be09d8d9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 840.148182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] Expecting reply to msg 22b4466b2abf43b19c97e95edfdb7e2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.148972] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7369a028-c46a-4164-9745-b2f2a3e9dbe6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.156182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22b4466b2abf43b19c97e95edfdb7e2a [ 840.158854] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1506dce-c6b6-4866-9db7-d0b848e83400 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.168776] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72211fe8-86d1-4064-bb35-4d4ee2bd033a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.189311] env[62109]: DEBUG nova.compute.provider_tree [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.189825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 75c15050fa28478c9e20bcebfc4c09cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.195359] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa1afca5-8194-4a9d-bcd0-e3e91c15338c could not be found. 
[ 840.195578] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 840.195756] env[62109]: INFO nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 840.195988] env[62109]: DEBUG oslo.service.loopingcall [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.196421] env[62109]: DEBUG nova.compute.manager [-] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 840.196512] env[62109]: DEBUG nova.network.neutron [-] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 840.198466] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75c15050fa28478c9e20bcebfc4c09cc [ 840.215365] env[62109]: DEBUG nova.network.neutron [-] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.215889] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 078a75768b5a4f858f3ce82b09d9c774 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.222489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 078a75768b5a4f858f3ce82b09d9c774 [ 840.316512] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52c2e0c1-f177-c7b8-3222-0c8a82d53cfb, 'name': SearchDatastore_Task, 'duration_secs': 0.007881} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.316776] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.317033] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 840.317293] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a8763ab-2eeb-4a89-8edc-4a83eba63557 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.324110] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 840.324110] env[62109]: value = "task-401486" [ 840.324110] env[62109]: _type = "Task" [ 840.324110] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.331445] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.475560] env[62109]: DEBUG nova.network.neutron [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.476512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 8171ac2f7ea54d78bdb0073ba1f5ca2b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.485886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8171ac2f7ea54d78bdb0073ba1f5ca2b [ 840.669428] env[62109]: DEBUG nova.network.neutron [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.697695] env[62109]: DEBUG nova.scheduler.client.report [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.700276] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 0868290650a74afa83ca563a85034f90 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.718077] env[62109]: DEBUG nova.network.neutron [-] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.718586] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 393e4fd7adea4a69be860da609ace9e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.719714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0868290650a74afa83ca563a85034f90 [ 840.728584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 393e4fd7adea4a69be860da609ace9e1 [ 840.776114] env[62109]: DEBUG nova.network.neutron [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.776657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] Expecting reply to msg dceb7f5dda7946c585a7aa3394f9c603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 840.783822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dceb7f5dda7946c585a7aa3394f9c603 [ 840.836192] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434957} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.836657] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 840.836913] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.837160] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aed4c17c-12e6-4842-bf62-7f58ee52736b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.844435] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 840.844435] env[62109]: value = "task-401487" [ 840.844435] env[62109]: _type = "Task" [ 840.844435] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.852296] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.979503] env[62109]: INFO nova.compute.manager [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] [instance: a04d014e-bed6-4e4b-a5eb-316d88c174f0] Took 1.04 seconds to deallocate network for instance. 
[ 840.981618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 3e95eef1f8024c879b57528ae393a7a6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.025386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e95eef1f8024c879b57528ae393a7a6 [ 841.203189] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.203704] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 841.205953] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 73c0b95a8f204106bd7c80fcc30dfc11 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.206967] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.379s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.208607] env[62109]: INFO nova.compute.claims [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.210085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 737e705aaaeb4266a08ba6d4e869342c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.220656] env[62109]: INFO nova.compute.manager [-] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Took 1.02 seconds to deallocate network for instance. 
[ 841.222766] env[62109]: DEBUG nova.compute.claims [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 841.222930] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.263972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 737e705aaaeb4266a08ba6d4e869342c [ 841.279829] env[62109]: DEBUG oslo_concurrency.lockutils [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] Releasing lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.279829] env[62109]: DEBUG nova.compute.manager [req-59758b09-1995-4afb-8634-400c3b430621 req-78741c85-108c-471e-9f94-0eba266b0fd9 service nova] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Received event network-vif-deleted-f608a2e6-985c-4133-bb07-9b62be09d8d9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 841.279829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73c0b95a8f204106bd7c80fcc30dfc11 [ 841.354508] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083966} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.354797] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.355632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee4db71-a016-42b6-b073-c9de6554a881 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.377096] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.377460] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67d9f7dd-eb4b-42e2-ac4b-fca93dddacf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.398489] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 841.398489] env[62109]: value = "task-401488" [ 841.398489] env[62109]: _type = "Task" [ 841.398489] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.407365] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401488, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.485936] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 7ac45709c2ff4f3c94e3a040f400c0eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.542108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ac45709c2ff4f3c94e3a040f400c0eb [ 841.719650] env[62109]: DEBUG nova.compute.utils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.719650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg e9872cfa7e7541bdb3c47699d0c7b2a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.719650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 71a9124b78ca40d1bdeb75f8eb559074 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 841.719650] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 841.719650] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 841.723035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71a9124b78ca40d1bdeb75f8eb559074 [ 841.728471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9872cfa7e7541bdb3c47699d0c7b2a1 [ 841.768415] env[62109]: DEBUG nova.policy [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '725320be5ef84e56a02daff05e075ac5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1ff92bed39f4c93ad77ea3d3e398a4a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 841.907850] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401488, 'name': ReconfigVM_Task, 'duration_secs': 0.275926} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.908529] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad/900e1e1e-5635-4782-bd87-046dd2af7dad.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.909862] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1b33852-22c9-4d2b-bb8b-f7f31adf8222 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.916403] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 841.916403] env[62109]: value = "task-401489" [ 841.916403] env[62109]: _type = "Task" [ 841.916403] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.926700] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401489, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.024405] env[62109]: INFO nova.scheduler.client.report [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Deleted allocations for instance a04d014e-bed6-4e4b-a5eb-316d88c174f0 [ 842.025906] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Expecting reply to msg 73fe3096a3db4ee1a45315815dc5e7c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.046808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73fe3096a3db4ee1a45315815dc5e7c6 [ 842.122519] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Successfully created port: caac7c6a-04c0-41de-8a4c-52c979260348 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.218060] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 842.219967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 6e1689a21381451599c9109ef26c9914 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.287014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e1689a21381451599c9109ef26c9914 [ 842.429115] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401489, 'name': Rename_Task, 'duration_secs': 0.147061} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.429115] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 842.429115] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ff7c486-b98e-4856-8303-77475ddfb2c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.434302] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 842.434302] env[62109]: value = "task-401490" [ 842.434302] env[62109]: _type = "Task" [ 842.434302] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.452457] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401490, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.516232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f8813f-af2a-4d49-bf9a-0501c2866633 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.524469] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11f775b-1d9e-4e32-9dd8-c10f6c9244a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.528574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-39c2b136-fb3e-44f7-ac09-088b8f850b73 tempest-AttachVolumeShelveTestJSON-1463436007 tempest-AttachVolumeShelveTestJSON-1463436007-project-member] Lock "a04d014e-bed6-4e4b-a5eb-316d88c174f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 163.176s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.529446] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg c1b7b34ad4e1429689e35ab66c79a4a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.561649] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1b7b34ad4e1429689e35ab66c79a4a5 [ 842.562466] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 842.564059] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg fc20f262b5be4c0d85be4f18de1364fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.566080] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f69e941-c837-49f6-bea6-887333e599f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.573524] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ec30bc-07b0-45d4-b8e4-01f358606272 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.588500] env[62109]: DEBUG nova.compute.provider_tree [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.589040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 003b88282cba4215bd18a67b13b05bcf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.596558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 003b88282cba4215bd18a67b13b05bcf [ 842.597094] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc20f262b5be4c0d85be4f18de1364fa [ 842.730108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 0ced38efa99c4202aad8e39a8a657dff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.765262] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ced38efa99c4202aad8e39a8a657dff [ 842.892083] env[62109]: DEBUG nova.compute.manager [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Received event network-changed-caac7c6a-04c0-41de-8a4c-52c979260348 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 842.892279] env[62109]: DEBUG nova.compute.manager [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Refreshing instance network info cache due to event network-changed-caac7c6a-04c0-41de-8a4c-52c979260348. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 842.892493] env[62109]: DEBUG oslo_concurrency.lockutils [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] Acquiring lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.892632] env[62109]: DEBUG oslo_concurrency.lockutils [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] Acquired lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.892788] env[62109]: DEBUG nova.network.neutron [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Refreshing network info cache for port caac7c6a-04c0-41de-8a4c-52c979260348 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 842.893198] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] Expecting reply to msg 65f0e45536c44a8e862a4b4191af309d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.900937] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65f0e45536c44a8e862a4b4191af309d [ 842.946378] env[62109]: DEBUG oslo_vmware.api [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401490, 'name': PowerOnVM_Task, 'duration_secs': 0.412698} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.946637] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 842.946974] env[62109]: DEBUG nova.compute.manager [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.947625] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2385cb2f-b9e6-4d36-9d1c-b497b4883c03 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.955132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg c7198c0d7d9c407e8d97688b1911d71c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 842.994978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7198c0d7d9c407e8d97688b1911d71c [ 843.082992] env[62109]: ERROR nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
[ 843.082992] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 843.082992] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 843.082992] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 843.082992] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.082992] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.082992] env[62109]: ERROR nova.compute.manager raise self.value [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 843.082992] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 843.082992] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.082992] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 843.083470] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 843.083470] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 843.083470] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
[ 843.083470] env[62109]: ERROR nova.compute.manager [ 843.083470] env[62109]: Traceback (most recent call last): [ 843.083470] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 843.083470] env[62109]: listener.cb(fileno) [ 843.083470] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 843.083470] env[62109]: result = function(*args, **kwargs) [ 843.083470] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 843.083470] env[62109]: return func(*args, **kwargs) [ 843.083470] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 843.083470] env[62109]: raise e [ 843.083470] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 843.083470] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 843.083470] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 843.083470] env[62109]: created_port_ids = self._update_ports_for_instance( [ 843.083470] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 843.083470] env[62109]: with excutils.save_and_reraise_exception(): [ 843.083470] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.083470] env[62109]: self.force_reraise() [ 843.083470] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.083470] env[62109]: raise self.value [ 843.083470] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 843.083470] env[62109]: updated_port = self._update_port( [ 843.083470] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.083470] env[62109]: _ensure_no_port_binding_failure(port) [ 843.083470] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 843.083470] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 843.084155] env[62109]: nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
[ 843.084155] env[62109]: Removing descriptor: 19 [ 843.085719] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.094616] env[62109]: DEBUG nova.scheduler.client.report [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.094616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg ae871945a4be49f68b3434e95952fd8c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 843.105906] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae871945a4be49f68b3434e95952fd8c [ 843.236768] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 843.263660] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=<?>,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-21T07:11:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.263822] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.263875] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.264124] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.264303] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.264442] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.264649] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.265200] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 843.265200] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.265200] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.265346] env[62109]: DEBUG nova.virt.hardware [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.266143] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd245dc2-314d-4121-9a50-70d5d52c5d54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.274038] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c7b1ea-04f5-40cb-aaf5-5334c6cab840 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.288617] env[62109]: ERROR nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
[ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Traceback (most recent call last): [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] yield resources [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.driver.spawn(context, instance, image_meta, [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] vm_ref = self.build_virtual_machine(instance, [ 843.288617] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] for vif in network_info: [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return self._sync_wrapper(fn, *args, **kwargs) [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.wait() [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self[:] = self._gt.wait() [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return self._exit_event.wait() [ 843.288988] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 843.288988] env[62109]: ERROR 
nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] current.throw(*self._exc) [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] result = function(*args, **kwargs) [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return func(*args, **kwargs) [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise e [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] nwinfo = self.network_api.allocate_for_instance( [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] created_port_ids = self._update_ports_for_instance( [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] with excutils.save_and_reraise_exception(): [ 843.289336] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.force_reraise() [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise self.value [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] updated_port = self._update_port( [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] _ensure_no_port_binding_failure(port) [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise exception.PortBindingFailed(port_id=port['id']) [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. [ 843.289647] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] [ 843.289647] env[62109]: INFO nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Terminating instance [ 843.290974] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquiring lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.420490] env[62109]: DEBUG nova.network.neutron [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.463004] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.519676] env[62109]: DEBUG nova.network.neutron [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.520319] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] Expecting reply to msg 241adb000e594246891a129ded2bf44a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 843.528119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 241adb000e594246891a129ded2bf44a [ 843.600585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.600585] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 843.600585] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 48a4d176c71d4489ba04d241d6a08e0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 843.601710] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.953s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.603487] env[62109]: INFO nova.compute.claims [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.605462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 6b4d3c0481e541d18e4f7c263bccd9ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 843.633659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48a4d176c71d4489ba04d241d6a08e0e [ 843.639481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b4d3c0481e541d18e4f7c263bccd9ca [ 843.757746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg c39881285d8c4d3f93e148783cc1e533 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 843.777779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c39881285d8c4d3f93e148783cc1e533 [ 844.024750] env[62109]: DEBUG oslo_concurrency.lockutils [req-23938e66-2dec-48d8-81a1-079fc785841e req-2cd2303b-d1cd-4ff2-9834-59efcb26b80f service nova] Releasing lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.024750] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquired lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.024750] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.024750] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 
05d31890678248cb964cce8d555e2f2f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 844.032624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05d31890678248cb964cce8d555e2f2f [ 844.113778] env[62109]: DEBUG nova.compute.utils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.113778] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg ce32057903a24c318b4b906697b3a199 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 844.116189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 93f52f84a2e74866a78085fbc439770c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 844.116189] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 844.116189] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 844.125373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93f52f84a2e74866a78085fbc439770c [ 844.135373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce32057903a24c318b4b906697b3a199 [ 844.233329] env[62109]: DEBUG nova.policy [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '793e0ffc9f9c447da4cefaf315b15eb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea0635c0a7d846df93cd8dd342486acd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 844.272847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "900e1e1e-5635-4782-bd87-046dd2af7dad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.273108] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 
tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.273310] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "900e1e1e-5635-4782-bd87-046dd2af7dad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.273480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.273637] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.275895] env[62109]: INFO nova.compute.manager [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Terminating instance [ 844.277731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "refresh_cache-900e1e1e-5635-4782-bd87-046dd2af7dad" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.277881] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "refresh_cache-900e1e1e-5635-4782-bd87-046dd2af7dad" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.278113] env[62109]: DEBUG nova.network.neutron [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.278513] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 3c04c8834b594128823f437f6e0f99db in queue reply_7522b64acfeb4981b1f36928b040d568 [ 844.285412] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c04c8834b594128823f437f6e0f99db [ 845.217699] env[62109]: DEBUG nova.network.neutron [None 
req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Successfully created port: e91962f5-3d5c-4d08-af11-4df2df76c337 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.224977] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.227246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 6b093fc226da44e1a5a0813681e95572 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 845.237272] env[62109]: DEBUG nova.compute.manager [req-8d92d804-5d25-439a-8658-2a18b3c18dd8 req-a6f2d240-80e4-415a-b7b6-74fbfc70e40d service nova] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Received event network-vif-deleted-caac7c6a-04c0-41de-8a4c-52c979260348 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 845.238838] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquiring lock "a276656a-67b0-4ceb-918f-cfb323ed09fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.242010] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "a276656a-67b0-4ceb-918f-cfb323ed09fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.272675] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.274934] env[62109]: DEBUG nova.network.neutron [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.276739] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b093fc226da44e1a5a0813681e95572 [ 845.431460] env[62109]: DEBUG nova.network.neutron [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.432173] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 469f6397f1e84572ad24107cbc71173d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 845.439630] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.440155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 4378f261afaa4bea97037de91e8a3e0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 845.444189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 469f6397f1e84572ad24107cbc71173d [ 845.450028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4378f261afaa4bea97037de91e8a3e0e [ 845.534203] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6399409f-92b0-4ed4-893c-7a59a0cbfda1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.542301] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc77b0c6-5563-46c7-b59a-4cc7784d60d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.582992] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0be6b0a-d108-4afe-857e-1ffd0dfbd235 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.592448] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e22c1d-608a-408e-a319-50d008259e3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.608108] env[62109]: DEBUG nova.compute.provider_tree [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.608693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 
tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 3531180e2f9a416687a5aa585d782f53 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 845.616198] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3531180e2f9a416687a5aa585d782f53 [ 845.740342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg a931c3df4e694bbd8f7237859c587a23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 845.784723] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a931c3df4e694bbd8f7237859c587a23 [ 845.936731] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "refresh_cache-900e1e1e-5635-4782-bd87-046dd2af7dad" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.937173] env[62109]: DEBUG nova.compute.manager [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 845.937385] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 845.938339] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49fa590-2241-4c5e-a56b-cfbd58eb8a3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.943196] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Releasing lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.943196] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 845.943196] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 845.943196] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d40e81f-6f8f-44d1-bde3-b89230e906bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.949521] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 845.950738] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-427f1803-3f92-4101-a031-82b34d4b974c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.954894] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aafdf86-b891-4842-bb51-dc8414a98f42 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.970133] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 845.970133] env[62109]: value = "task-401491" [ 845.970133] env[62109]: _type = "Task" [ 845.970133] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.978233] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 732cf1e3-823d-4769-ad16-f5b492be53d5 could not be found. [ 845.978453] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 845.978629] env[62109]: INFO nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 845.978865] env[62109]: DEBUG oslo.service.loopingcall [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.981812] env[62109]: DEBUG nova.compute.manager [-] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 845.981935] env[62109]: DEBUG nova.network.neutron [-] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 845.984375] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.002472] env[62109]: DEBUG nova.network.neutron [-] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.003020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd3580c2003d4a20b0134cb85c43d722 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.010296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd3580c2003d4a20b0134cb85c43d722 [ 846.115318] env[62109]: DEBUG nova.scheduler.client.report [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.117771] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 407f1675ca3640d6835a49a048d7c000 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.135609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 407f1675ca3640d6835a49a048d7c000 [ 846.244560] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.270547] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.270795] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.270947] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.271129] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.271274] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.271418] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.271792] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.271956] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.272125] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.272288] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.272453] env[62109]: DEBUG nova.virt.hardware [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.273302] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d7988a-fb4f-4f88-860a-aa4795fe6747 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.282482] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c225d29-a7af-449f-bd78-bd151786a906 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.421247] env[62109]: ERROR nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. 
[ 846.421247] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.421247] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.421247] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.421247] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.421247] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.421247] env[62109]: ERROR nova.compute.manager raise self.value [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.421247] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 846.421247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.421247] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 846.421733] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.421733] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 846.421733] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. 
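The traceback above bottoms out in nova/network/neutron.py: _update_port() runs inside excutils.save_and_reraise_exception(), and _ensure_no_port_binding_failure() raises PortBindingFailed once Neutron reports the port binding as failed, which is the error then propagated back through _allocate_network_async(). A minimal standalone sketch of that pattern follows; the simplified port dict, the local PortBindingFailed class and the 'binding_failed' string check are stand-ins for the real Nova/Neutron objects, not the actual implementation.

# Minimal sketch of the error-handling pattern shown in the traceback above.
# PortBindingFailed, the port dict and the 'binding_failed' check are
# simplified stand-ins for the real Nova/Neutron types.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Treat a failed Neutron binding as a hard error for the build.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def _update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        # save_and_reraise_exception() re-raises the original exception on
        # exit, which is why force_reraise() / "raise self.value" appear in
        # the traceback between _update_ports_for_instance and the
        # PortBindingFailed frame.
        with excutils.save_and_reraise_exception():
            _ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
    return created_port_ids


if __name__ == '__main__':
    failed_port = {'id': 'e91962f5-3d5c-4d08-af11-4df2df76c337',
                   'binding:vif_type': 'binding_failed'}
    try:
        _update_ports_for_instance([failed_port])
    except PortBindingFailed as exc:
        print(exc)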
[ 846.421733] env[62109]: ERROR nova.compute.manager [ 846.421733] env[62109]: Traceback (most recent call last): [ 846.421733] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 846.421733] env[62109]: listener.cb(fileno) [ 846.421733] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.421733] env[62109]: result = function(*args, **kwargs) [ 846.421733] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.421733] env[62109]: return func(*args, **kwargs) [ 846.421733] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.421733] env[62109]: raise e [ 846.421733] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.421733] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 846.421733] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.421733] env[62109]: created_port_ids = self._update_ports_for_instance( [ 846.421733] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.421733] env[62109]: with excutils.save_and_reraise_exception(): [ 846.421733] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.421733] env[62109]: self.force_reraise() [ 846.421733] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.421733] env[62109]: raise self.value [ 846.421733] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.421733] env[62109]: updated_port = self._update_port( [ 846.421733] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.421733] env[62109]: _ensure_no_port_binding_failure(port) [ 846.421733] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.421733] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 846.422419] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. [ 846.422419] env[62109]: Removing descriptor: 19 [ 846.422419] env[62109]: ERROR nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. 
[ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Traceback (most recent call last): [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] yield resources [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.driver.spawn(context, instance, image_meta, [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.422419] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] vm_ref = self.build_virtual_machine(instance, [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] for vif in network_info: [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self._sync_wrapper(fn, *args, **kwargs) [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.wait() [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self[:] = self._gt.wait() [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self._exit_event.wait() [ 846.422712] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 846.423017] env[62109]: ERROR 
nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] result = hub.switch() [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self.greenlet.switch() [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] result = function(*args, **kwargs) [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return func(*args, **kwargs) [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise e [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] nwinfo = self.network_api.allocate_for_instance( [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.423017] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] created_port_ids = self._update_ports_for_instance( [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] with excutils.save_and_reraise_exception(): [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.force_reraise() [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise self.value [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] updated_port = self._update_port( [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.423323] 
env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] _ensure_no_port_binding_failure(port) [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.423323] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise exception.PortBindingFailed(port_id=port['id']) [ 846.423607] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. [ 846.423607] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] [ 846.423607] env[62109]: INFO nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Terminating instance [ 846.424693] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquiring lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.424886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquired lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.425087] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.425536] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 304ad636a48d4edf85d959261068eb84 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.432492] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 304ad636a48d4edf85d959261068eb84 [ 846.481345] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401491, 'name': PowerOffVM_Task, 'duration_secs': 0.120063} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.481615] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 846.481780] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 846.482025] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-137c3f75-6993-456b-98d9-f139684c2c8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.505621] env[62109]: DEBUG nova.network.neutron [-] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.506134] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1c0102ca07f94971b6434607801a0b64 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.513573] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 846.513924] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 846.513985] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleting the datastore file [datastore1] 900e1e1e-5635-4782-bd87-046dd2af7dad {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 846.514605] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c0102ca07f94971b6434607801a0b64 [ 846.514979] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-701fe4c7-d7cc-4431-9725-27af160ea4a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.522162] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 846.522162] env[62109]: value = "task-401493" [ 846.522162] env[62109]: _type = "Task" [ 846.522162] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.530529] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.620864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.019s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.621418] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 846.623294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 55e946c7ea5d44ef97b23c3750c5ac57 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.626285] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.794s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.628269] env[62109]: INFO nova.compute.claims [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.630138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg e819b44d50c04ba689e8940a4db691dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 846.662906] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55e946c7ea5d44ef97b23c3750c5ac57 [ 846.668266] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e819b44d50c04ba689e8940a4db691dc [ 846.953998] env[62109]: DEBUG nova.compute.manager [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Received event network-changed-e91962f5-3d5c-4d08-af11-4df2df76c337 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 846.954195] env[62109]: DEBUG nova.compute.manager [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Refreshing instance network info cache due to event 
network-changed-e91962f5-3d5c-4d08-af11-4df2df76c337. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 846.954385] env[62109]: DEBUG oslo_concurrency.lockutils [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] Acquiring lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.964461] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.008583] env[62109]: INFO nova.compute.manager [-] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Took 1.03 seconds to deallocate network for instance. [ 847.010859] env[62109]: DEBUG nova.compute.claims [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 847.011074] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.031538] env[62109]: DEBUG oslo_vmware.api [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401493, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099045} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.031991] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.032958] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 847.032958] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 847.032958] env[62109]: INFO nova.compute.manager [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Took 1.10 seconds to destroy the instance on the hypervisor. 
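The PowerOffVM_Task and DeleteDatastoreFile_Task records above follow oslo.vmware's invoke-then-poll pattern: the driver submits the asynchronous vSphere task through the API session and then blocks on it until vCenter reports completion, which is what produces the "Waiting for the task ... to complete" and "progress is 0%" lines. A rough sketch of that flow is given below, assuming an oslo.vmware VMwareAPISession and a VM managed-object reference already looked up (for example via the SearchIndex.FindAllByUuid call logged earlier); the connection details are placeholders, not the driver's actual configuration.

# Rough sketch of the invoke-then-wait flow behind the PowerOffVM_Task and
# DeleteDatastoreFile_Task records. Connection details are placeholders and
# error handling is omitted; this is not the driver's actual code.
from oslo_vmware import api as vmware_api


def make_session(host, username, password):
    # Placeholder credentials; a real deployment reads these from nova.conf.
    return vmware_api.VMwareAPISession(host, username, password,
                                       api_retry_count=3,
                                       task_poll_interval=0.5)


def power_off_vm(session, vm_ref):
    # Submit the asynchronous vSphere task
    # ("Invoking VirtualMachine.PowerOffVM_Task" in the log) ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... then poll it until vCenter marks it done, which yields the
    # "Waiting for the task ... to complete" / "progress is 0%" records.
    session.wait_for_task(task)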
[ 847.032958] env[62109]: DEBUG oslo.service.loopingcall [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.033187] env[62109]: DEBUG nova.compute.manager [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 847.033187] env[62109]: DEBUG nova.network.neutron [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 847.037202] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.037671] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 327a0d5d93924cd5b8f2d397463b81b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.046875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 327a0d5d93924cd5b8f2d397463b81b3 [ 847.052170] env[62109]: DEBUG nova.network.neutron [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.052697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg be4b7d8e29a342559c3d7e75db360e08 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.061378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be4b7d8e29a342559c3d7e75db360e08 [ 847.133671] env[62109]: DEBUG nova.compute.utils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 847.134271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 7ec632a58d644914a30ea8b0e854a234 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.137316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 0cf84497653f455090abcaa2f8091ab2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.139291] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 847.139291] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 847.144624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cf84497653f455090abcaa2f8091ab2 [ 847.145511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ec632a58d644914a30ea8b0e854a234 [ 847.177813] env[62109]: DEBUG nova.policy [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c96ff95d54342919bf01d662a5cf413', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa39a5fecdee4b9ab51732c89aeb5004', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 847.488493] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Successfully created port: fcae7974-3037-4637-9e78-4b3d7df2d667 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.540538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Releasing lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.541034] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 847.541245] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 847.541546] env[62109]: DEBUG oslo_concurrency.lockutils [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] Acquired lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.541709] env[62109]: DEBUG nova.network.neutron [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Refreshing network info cache for port e91962f5-3d5c-4d08-af11-4df2df76c337 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 847.542156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] Expecting reply to msg 9c9e048be26d41f598ea072f7952cded in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.543519] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-924f576b-6fa7-421c-a8cb-7c8f811da148 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.550461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c9e048be26d41f598ea072f7952cded [ 847.553816] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31588a4-f06a-4d65-8e69-f4eb0df6e232 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.564738] env[62109]: DEBUG nova.network.neutron [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.565168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 603b382ada504d568aedc54926dbc462 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.573966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 603b382ada504d568aedc54926dbc462 [ 847.580116] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 436788b9-92bb-4088-9c24-c2e9a073c09d could not be found. 
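The WARNING just above ("Instance does not exist on backend: nova.exception.InstanceNotFound") followed immediately by "Instance destroyed" illustrates how the vmwareapi destroy path tolerates a VM that is already gone: the not-found error is logged and the teardown is treated as complete rather than failing the request. Below is a small sketch of that pattern under assumed names; the backend dict, _lookup_vm helper, and local InstanceNotFound class are hypothetical stand-ins for the driver internals.

class InstanceNotFound(Exception):
    """Illustrative stand-in for nova.exception.InstanceNotFound."""

def _lookup_vm(backend, instance_uuid):
    # Hypothetical backend lookup; raises when the VM is already gone.
    try:
        return backend[instance_uuid]
    except KeyError:
        raise InstanceNotFound(
            "Instance %s could not be found." % instance_uuid) from None

def destroy(backend, instance_uuid):
    """Destroy an instance, tolerating the case where the hypervisor no
    longer knows about it (the WARNING + 'Instance destroyed' sequence above)."""
    try:
        vm = _lookup_vm(backend, instance_uuid)
    except InstanceNotFound as exc:
        print("WARNING: Instance does not exist on backend: %s" % exc)
        return  # nothing left to tear down; treat it as already destroyed
    backend.pop(instance_uuid)
    print("Destroyed VM %r" % vm)

if __name__ == '__main__':
    destroy({}, '436788b9-92bb-4088-9c24-c2e9a073c09d')  # already gone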
[ 847.580331] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 847.580819] env[62109]: INFO nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 847.581080] env[62109]: DEBUG oslo.service.loopingcall [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.581308] env[62109]: DEBUG nova.compute.manager [-] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 847.581471] env[62109]: DEBUG nova.network.neutron [-] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 847.600122] env[62109]: DEBUG nova.network.neutron [-] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.600634] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c6c058a303234273853261e8a7fa4e2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.607385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6c058a303234273853261e8a7fa4e2d [ 847.639246] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 847.641388] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 22ed840b6387405aae298523f01c6b05 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.687763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22ed840b6387405aae298523f01c6b05 [ 847.899044] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e634c1-4f0e-4ffa-85e8-5455ae5e5cf0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.906659] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d138b27-7a82-46a5-b864-c0a2e503342f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.936873] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6a2807-1b56-4888-8f40-0c56e5993a3d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.946105] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe85068-478d-43ed-af75-ce544dd8b155 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.957328] env[62109]: DEBUG nova.compute.provider_tree [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.957837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg ad5fa947450b4c73b70c3c66686acb74 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 847.964766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad5fa947450b4c73b70c3c66686acb74 [ 848.062685] env[62109]: DEBUG nova.network.neutron [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.068174] env[62109]: INFO nova.compute.manager [-] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Took 1.03 seconds to deallocate network for instance. 
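The "Inventory has not changed in ProviderTree for provider ..." line reflects a simple comparison: the resource tracker computes a fresh inventory dict for the provider and only pushes an update to placement when it differs from what the tree already holds. The sketch below shows that comparison in isolation; the _Provider class and the update_inventory return convention are illustrative and not the real nova.compute.provider_tree API.

class _Provider:
    """Illustrative in-memory record of one resource provider's inventory."""
    def __init__(self, uuid, inventory=None):
        self.uuid = uuid
        self.inventory = inventory or {}

    def update_inventory(self, new_inventory):
        """Store and return True only if the inventory actually changed,
        mirroring the DEBUG line above when it has not."""
        if self.inventory == new_inventory:
            print("Inventory has not changed for provider: %s" % self.uuid)
            return False
        self.inventory = new_inventory
        return True

if __name__ == '__main__':
    p = _Provider('5d099501-5ecf-4ee9-ac08-22024ac3c80e',
                  {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}})
    same = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
    assert p.update_inventory(same) is False    # identical -> no update sent
    changed = {'VCPU': {'total': 48, 'reserved': 2, 'allocation_ratio': 4.0}}
    assert p.update_inventory(changed) is True  # differs -> stored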
[ 848.072375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 0e04123589f1415786090594d28aacd0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.102456] env[62109]: DEBUG nova.network.neutron [-] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.102456] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c4ea81bc03324c898395fcec8ff53dfa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.109146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e04123589f1415786090594d28aacd0 [ 848.110504] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4ea81bc03324c898395fcec8ff53dfa [ 848.139266] env[62109]: DEBUG nova.network.neutron [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.139783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] Expecting reply to msg 58c7891824264710ab86933f90891f2e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.145871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 730847c713b54d0b9dc833997f69df30 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.155123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58c7891824264710ab86933f90891f2e [ 848.180193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730847c713b54d0b9dc833997f69df30 [ 848.326328] env[62109]: DEBUG nova.compute.manager [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Received event network-changed-fcae7974-3037-4637-9e78-4b3d7df2d667 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 848.326526] env[62109]: DEBUG nova.compute.manager [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Refreshing instance network info cache due to event network-changed-fcae7974-3037-4637-9e78-4b3d7df2d667. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 848.327164] env[62109]: DEBUG oslo_concurrency.lockutils [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] Acquiring lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.327275] env[62109]: DEBUG oslo_concurrency.lockutils [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] Acquired lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.327421] env[62109]: DEBUG nova.network.neutron [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Refreshing network info cache for port fcae7974-3037-4637-9e78-4b3d7df2d667 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 848.327838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] Expecting reply to msg 9993167e62e24eb5a903d268f4d06b1d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.334536] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9993167e62e24eb5a903d268f4d06b1d [ 848.378376] env[62109]: ERROR nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. 
[ 848.378376] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 848.378376] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.378376] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.378376] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.378376] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.378376] env[62109]: ERROR nova.compute.manager raise self.value [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.378376] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 848.378376] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.378376] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 848.378785] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.378785] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 848.378785] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. 
[ 848.378785] env[62109]: ERROR nova.compute.manager [ 848.378785] env[62109]: Traceback (most recent call last): [ 848.378785] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 848.378785] env[62109]: listener.cb(fileno) [ 848.378785] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 848.378785] env[62109]: result = function(*args, **kwargs) [ 848.378785] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 848.378785] env[62109]: return func(*args, **kwargs) [ 848.378785] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 848.378785] env[62109]: raise e [ 848.378785] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 848.378785] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 848.378785] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.378785] env[62109]: created_port_ids = self._update_ports_for_instance( [ 848.378785] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.378785] env[62109]: with excutils.save_and_reraise_exception(): [ 848.378785] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.378785] env[62109]: self.force_reraise() [ 848.378785] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.378785] env[62109]: raise self.value [ 848.378785] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.378785] env[62109]: updated_port = self._update_port( [ 848.378785] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.378785] env[62109]: _ensure_no_port_binding_failure(port) [ 848.378785] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.378785] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 848.379472] env[62109]: nova.exception.PortBindingFailed: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. 
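The traceback above bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed: after Neutron updates the port, Nova inspects the returned binding and converts a failed binding into this exception so the build can be aborted. The sketch below assumes the failed state is signalled by a 'binding:vif_type' of 'binding_failed' (that detail is an assumption here, not quoted from this log); the exception class and helper name are local stand-ins rather than the nova.exception / nova.network.neutron originals.

class PortBindingFailed(Exception):
    """Illustrative stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron reports a failed binding via binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    port = {'id': 'fcae7974-3037-4637-9e78-4b3d7df2d667',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print("ERROR: %s" % exc)  # matches the message in the traceback above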
[ 848.379472] env[62109]: Removing descriptor: 16 [ 848.460563] env[62109]: DEBUG nova.scheduler.client.report [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.463047] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg e066af954d8c4bedbad2f793a18e826a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.474399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e066af954d8c4bedbad2f793a18e826a [ 848.578207] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.604701] env[62109]: INFO nova.compute.manager [-] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Took 1.02 seconds to deallocate network for instance. 
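The inventory data reported above fixes the schedulable capacity of this node: placement can allocate up to (total - reserved) * allocation_ratio per resource class. With the exact numbers from the log that is 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM, and 400 GB of disk. The helper below just re-does that arithmetic; it is a worked example, not placement code.

INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable_capacity(inventory):
    """Capacity placement may hand out: (total - reserved) * allocation_ratio."""
    return {
        rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        for rc, inv in inventory.items()
    }

if __name__ == '__main__':
    for rc, cap in schedulable_capacity(INVENTORY).items():
        print("%s: %g" % (rc, cap))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400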
[ 848.607280] env[62109]: DEBUG nova.compute.claims [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 848.607450] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.641628] env[62109]: DEBUG oslo_concurrency.lockutils [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] Releasing lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.641911] env[62109]: DEBUG nova.compute.manager [req-d257cdeb-5613-44a5-86ef-e7b5ad3acab1 req-0e87efb0-4e13-40f6-b287-3bb9b19347ef service nova] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Received event network-vif-deleted-e91962f5-3d5c-4d08-af11-4df2df76c337 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 848.648331] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 848.677401] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.677663] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.677872] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.678231] env[62109]: 
DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 848.678424] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.678585] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.678815] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.679172] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.679350] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.679531] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.679705] env[62109]: DEBUG nova.virt.hardware [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.680592] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8841ed5-ad74-48b1-89c1-8644d45a90b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.696231] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f0d96-5c8b-478e-bba7-ed71d89eb616 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.704550] env[62109]: ERROR nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance failed to spawn: nova.exception.PortBindingFailed: 
Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Traceback (most recent call last): [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] yield resources [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.driver.spawn(context, instance, image_meta, [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] vm_ref = self.build_virtual_machine(instance, [ 848.704550] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] for vif in network_info: [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return self._sync_wrapper(fn, *args, **kwargs) [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.wait() [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self[:] = self._gt.wait() [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return self._exit_event.wait() [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 848.704906] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] current.throw(*self._exc) [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] result = function(*args, **kwargs) [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return func(*args, **kwargs) [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise e [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] nwinfo = self.network_api.allocate_for_instance( [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] created_port_ids = self._update_ports_for_instance( [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] with excutils.save_and_reraise_exception(): [ 848.705231] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.force_reraise() [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise self.value [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] updated_port = self._update_port( [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] _ensure_no_port_binding_failure(port) [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 
53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise exception.PortBindingFailed(port_id=port['id']) [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] nova.exception.PortBindingFailed: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. [ 848.705542] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] [ 848.705542] env[62109]: INFO nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Terminating instance [ 848.707070] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquiring lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.844929] env[62109]: DEBUG nova.network.neutron [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.916799] env[62109]: DEBUG nova.network.neutron [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.917361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] Expecting reply to msg 616e4326ac564524bea38966fb7043a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.924933] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 616e4326ac564524bea38966fb7043a2 [ 848.965640] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.966225] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 848.968347] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 9003c965bf3e4fe9901686a199a3b334 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 848.969888] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.315s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.971860] env[62109]: INFO nova.compute.claims [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.973398] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5ff268147c464e97a3f4656d7a5b2da2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 849.011561] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ff268147c464e97a3f4656d7a5b2da2 [ 849.019734] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9003c965bf3e4fe9901686a199a3b334 [ 849.419990] env[62109]: DEBUG oslo_concurrency.lockutils [req-879c6008-5b42-41c4-86b7-54647149d606 req-acea9eff-bf65-4cd6-9edd-7dfe883335d3 service nova] Releasing lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.420410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquired lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.420603] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.421029] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 10bcc9fc03054d04bec3bdabf5de65e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 849.428089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10bcc9fc03054d04bec3bdabf5de65e1 [ 849.471210] env[62109]: DEBUG nova.compute.utils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.471903] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 12bdf09cff34476fbcab6c9349692317 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 849.473294] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 849.473538] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 849.477056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg e15f22d7c56a45c6aa6d94e6b2c49db9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 849.483828] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e15f22d7c56a45c6aa6d94e6b2c49db9 [ 849.484424] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12bdf09cff34476fbcab6c9349692317 [ 849.524314] env[62109]: DEBUG nova.policy [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23f9946393284244aca0c29201c37fa4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21cdf8675fb347c2874d912dcb8ac002', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 849.861248] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Successfully created port: e599aa10-a1f1-41b4-933c-2956e0b7e627 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.938445] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.977160] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 849.979409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 4b1c66a076fb4382aca75895f2e3aee5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.032680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b1c66a076fb4382aca75895f2e3aee5 [ 850.035809] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.036548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 35b30ef5aec0410085f5b150d77117af in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.052434] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35b30ef5aec0410085f5b150d77117af [ 850.265085] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881e9adb-7ae8-4fb5-9b80-a60d72522747 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.274212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198fcd6e-3419-4c93-927c-036532d6dfb3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.305502] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00027195-3394-484e-817f-d0d89f15e639 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.312487] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b06aa8c-8df2-48e9-881b-134b1a81a86e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.327043] env[62109]: DEBUG nova.compute.provider_tree [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.327704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a5d3fc70f594462c85239187aa78e183 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.336166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5d3fc70f594462c85239187aa78e183 [ 850.352920] env[62109]: DEBUG nova.compute.manager [req-29423ee8-27ac-4faf-a0c2-5046907ede9d req-fd9544b8-e84e-484e-88a3-13a0cc023deb service nova] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Received event network-vif-deleted-fcae7974-3037-4637-9e78-4b3d7df2d667 {{(pid=62109) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 850.486375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg ff95caff484f4b37b73931e53e540af5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.516966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff95caff484f4b37b73931e53e540af5 [ 850.539893] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Releasing lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.540868] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.541216] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 850.541632] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61b6abe7-1df4-4596-821c-978372f1d705 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.552558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc97bbc0-6e39-4e5c-8c1b-58f7a36c3beb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.575760] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53d6d89d-04bb-421d-994c-014830491dfa could not be found. [ 850.576228] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 850.576549] env[62109]: INFO nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 850.576918] env[62109]: DEBUG oslo.service.loopingcall [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.577292] env[62109]: DEBUG nova.compute.manager [-] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.577512] env[62109]: DEBUG nova.network.neutron [-] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 850.591318] env[62109]: DEBUG nova.network.neutron [-] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.591917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 637eefd1aaf348e5a558fb35abf18d54 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.598697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 637eefd1aaf348e5a558fb35abf18d54 [ 850.799064] env[62109]: ERROR nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. [ 850.799064] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 850.799064] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.799064] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.799064] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.799064] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.799064] env[62109]: ERROR nova.compute.manager raise self.value [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.799064] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 850.799064] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.799064] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 850.799495] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.799495] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 850.799495] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. [ 850.799495] env[62109]: ERROR nova.compute.manager [ 850.799495] env[62109]: Traceback (most recent call last): [ 850.799495] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 850.799495] env[62109]: listener.cb(fileno) [ 850.799495] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 850.799495] env[62109]: result = function(*args, **kwargs) [ 850.799495] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 850.799495] env[62109]: return func(*args, **kwargs) [ 850.799495] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 850.799495] env[62109]: raise e [ 850.799495] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 850.799495] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 850.799495] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.799495] env[62109]: created_port_ids = self._update_ports_for_instance( [ 850.799495] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.799495] env[62109]: with excutils.save_and_reraise_exception(): [ 850.799495] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.799495] env[62109]: self.force_reraise() [ 850.799495] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.799495] env[62109]: raise self.value [ 850.799495] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.799495] env[62109]: updated_port = self._update_port( [ 850.799495] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.799495] env[62109]: _ensure_no_port_binding_failure(port) [ 850.799495] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.799495] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 850.800185] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. 
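The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed for port e599aa10-a1f1-41b4-933c-2956e0b7e627. A minimal, self-contained sketch of that check follows; the assumption (not visible in the log) is that Nova inspects the port's 'binding:vif_type' attribute returned by Neutron and treats the value 'binding_failed' as a failed binding, and the local PortBindingFailed class here only stands in for nova.exception.PortBindingFailed.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a port its mechanism drivers could not bind
    # with the special vif_type 'binding_failed'; Nova turns that into an
    # exception, which is what aborts the build seen above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


port = {'id': 'e599aa10-a1f1-41b4-933c-2956e0b7e627',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

Running the sketch prints the same "Binding failed for port ..., please check neutron logs for more information." message that recurs throughout this log.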
[ 850.800185] env[62109]: Removing descriptor: 16 [ 850.830434] env[62109]: DEBUG nova.scheduler.client.report [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.832899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9e6cf64525f346478a32ce3ebef5819f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 850.844139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e6cf64525f346478a32ce3ebef5819f [ 850.989724] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 851.013026] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.013263] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.013487] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.013689] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.013836] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.013982] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.014188] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.014344] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.014506] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.014658] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.014820] env[62109]: DEBUG nova.virt.hardware [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.015750] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315b624e-d255-4943-977f-aaa274fccb5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.024346] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31732f0f-0704-45a0-a16c-4e0a90d6a018 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.037600] env[62109]: ERROR nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. 
[ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Traceback (most recent call last): [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] yield resources [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self.driver.spawn(context, instance, image_meta, [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] vm_ref = self.build_virtual_machine(instance, [ 851.037600] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] for vif in network_info: [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return self._sync_wrapper(fn, *args, **kwargs) [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self.wait() [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self[:] = self._gt.wait() [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return self._exit_event.wait() [ 851.037973] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 851.037973] env[62109]: ERROR 
nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] current.throw(*self._exc) [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] result = function(*args, **kwargs) [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return func(*args, **kwargs) [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise e [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] nwinfo = self.network_api.allocate_for_instance( [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] created_port_ids = self._update_ports_for_instance( [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] with excutils.save_and_reraise_exception(): [ 851.038412] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self.force_reraise() [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise self.value [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] updated_port = self._update_port( [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] _ensure_no_port_binding_failure(port) [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise exception.PortBindingFailed(port_id=port['id']) [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. [ 851.038862] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] [ 851.038862] env[62109]: INFO nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Terminating instance [ 851.040247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.040438] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.040606] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.041124] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 3d92def1f7ad473e98fa2189485e679b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.047709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d92def1f7ad473e98fa2189485e679b [ 851.093550] env[62109]: DEBUG nova.network.neutron [-] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.093941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d53e78c81a604652ac4df1c97747c45e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.101569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d53e78c81a604652ac4df1c97747c45e [ 851.336043] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.336043] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 
6163fcd4-cfe4-4432-ba8d-665319fa11ed] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.337751] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 96ef014a45fd4b5f82f11ffd2a12463c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.338844] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.624s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.340601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg ec1fcdb52bcb473aa493110a4ba1014f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.373252] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec1fcdb52bcb473aa493110a4ba1014f [ 851.373481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96ef014a45fd4b5f82f11ffd2a12463c [ 851.556629] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.596192] env[62109]: INFO nova.compute.manager [-] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Took 1.02 seconds to deallocate network for instance. 
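The "compute_resources" lock messages above ("acquired ... waited 14.624s", "released ... held 2.365s") are emitted by oslo.concurrency's lockutils wrapper (the "inner" frames in lockutils.py), which serializes instance_claim and abort_instance_claim on the resource tracker. A minimal sketch of that pattern, assuming a plain lockutils.synchronized decorator and placeholder claim/abort bodies rather than Nova's real ResourceTracker methods:

import threading
import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim():
    time.sleep(2.0)      # placeholder for inventory bookkeeping work


@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    pass                 # runs only after the claim releases the lock


t = threading.Thread(target=instance_claim)
t.start()
time.sleep(0.1)
abort_instance_claim()   # with debug logging enabled, lockutils reports how
t.join()                 # long this call waited for and held the lock

The point of the pattern is that a slow claim makes any concurrent abort report a long "waited" time, exactly as seen for the ServersTestMultiNic request above.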
[ 851.600322] env[62109]: DEBUG nova.compute.claims [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 851.600521] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.631875] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.632395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 69d52d75deb744bd9a38f7ae76f86dec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.641038] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69d52d75deb744bd9a38f7ae76f86dec [ 851.846856] env[62109]: DEBUG nova.compute.utils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.847461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0e6b5b68eff0493abe8fc6815dae99f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 851.853039] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 851.853039] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 851.857512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e6b5b68eff0493abe8fc6815dae99f9 [ 851.891286] env[62109]: DEBUG nova.policy [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 852.070510] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe944b3c-ecd6-4f12-b252-c2c884b91f8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.078468] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbee5ac-82a0-4d6e-ab0a-212d987dc3e6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.108777] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd478485-1d24-4f08-938e-8f9e85daf4c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.115893] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeb5391-b453-493f-b9fe-cf9cc6b54a34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.128668] env[62109]: DEBUG nova.compute.provider_tree [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.129206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 2b25d47194a94cd2b634d11efc974bfd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.134351] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.134836] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 
tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 852.135037] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 852.135289] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c8d4e2d-3905-42a4-b336-c5b810e113b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.139782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b25d47194a94cd2b634d11efc974bfd [ 852.143429] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88833925-51cd-4122-a315-449e2ecd39a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.166725] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ada5090-7219-4835-b508-2188501ae5e4 could not be found. [ 852.166966] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 852.167139] env[62109]: INFO nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 852.167373] env[62109]: DEBUG oslo.service.loopingcall [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.167581] env[62109]: DEBUG nova.compute.manager [-] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 852.167672] env[62109]: DEBUG nova.network.neutron [-] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 852.182888] env[62109]: DEBUG nova.network.neutron [-] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.183406] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6c177d694ff743c392e7cedbda666184 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.190408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c177d694ff743c392e7cedbda666184 [ 852.212385] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Successfully created port: 4708138f-6f39-4fbc-8800-010c527d3c13 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.353876] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.355669] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg d27774a9b2b94a04a37cc3d193780314 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.378227] env[62109]: DEBUG nova.compute.manager [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Received event network-changed-e599aa10-a1f1-41b4-933c-2956e0b7e627 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 852.378409] env[62109]: DEBUG nova.compute.manager [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Refreshing instance network info cache due to event network-changed-e599aa10-a1f1-41b4-933c-2956e0b7e627. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 852.378617] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] Acquiring lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.378753] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] Acquired lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.378903] env[62109]: DEBUG nova.network.neutron [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Refreshing network info cache for port e599aa10-a1f1-41b4-933c-2956e0b7e627 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 852.379365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] Expecting reply to msg 07ec87f05ea344de8dedb298f83ef821 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.387013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ec87f05ea344de8dedb298f83ef821 [ 852.391283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d27774a9b2b94a04a37cc3d193780314 [ 852.632144] env[62109]: DEBUG nova.scheduler.client.report [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.634571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 0fd8c46c563b47998869016f82ede635 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.649542] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fd8c46c563b47998869016f82ede635 [ 852.685030] env[62109]: DEBUG nova.network.neutron [-] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.685494] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c2085f2168274d61927e9f4d889673ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.694506] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2085f2168274d61927e9f4d889673ca [ 852.860301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 
tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 32e34c45f0284666ac8ef5f7549692ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 852.896398] env[62109]: DEBUG nova.network.neutron [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.899286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32e34c45f0284666ac8ef5f7549692ec [ 852.994607] env[62109]: DEBUG nova.network.neutron [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.995135] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] Expecting reply to msg a9959dd1772d459e8ea3740b4c8c8906 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.003625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9959dd1772d459e8ea3740b4c8c8906 [ 853.079198] env[62109]: ERROR nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. [ 853.079198] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.079198] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.079198] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.079198] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.079198] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.079198] env[62109]: ERROR nova.compute.manager raise self.value [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.079198] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 853.079198] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.079198] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 853.079849] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 853.079849] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 853.079849] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. [ 853.079849] env[62109]: ERROR nova.compute.manager [ 853.079849] env[62109]: Traceback (most recent call last): [ 853.079849] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 853.079849] env[62109]: listener.cb(fileno) [ 853.079849] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.079849] env[62109]: result = function(*args, **kwargs) [ 853.079849] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.079849] env[62109]: return func(*args, **kwargs) [ 853.079849] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.079849] env[62109]: raise e [ 853.079849] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.079849] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 853.079849] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.079849] env[62109]: created_port_ids = self._update_ports_for_instance( [ 853.079849] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.079849] env[62109]: with excutils.save_and_reraise_exception(): [ 853.079849] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.079849] env[62109]: self.force_reraise() [ 853.079849] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.079849] env[62109]: raise self.value [ 853.079849] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.079849] env[62109]: updated_port = self._update_port( [ 853.079849] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.079849] env[62109]: _ensure_no_port_binding_failure(port) [ 853.079849] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.079849] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 853.080664] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. 
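Both PortBindingFailed tracebacks pass through excutils.save_and_reraise_exception(), force_reraise() and "raise self.value". That is oslo.utils' context manager for running cleanup inside an except block and then re-raising the original exception on exit. A small sketch, with the cleanup body left as a placeholder and a local PortBindingFailed standing in for the Nova exception:

from oslo_utils import excutils


class PortBindingFailed(Exception):
    pass


def update_ports_for_instance():
    try:
        raise PortBindingFailed(
            "Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13")
    except PortBindingFailed:
        with excutils.save_and_reraise_exception():
            # Cleanup would go here (e.g. rolling back ports created so far);
            # on leaving the with-block the saved PortBindingFailed is
            # re-raised, which is the force_reraise()/raise self.value pair
            # visible in the tracebacks above.
            pass


try:
    update_ports_for_instance()
except PortBindingFailed as exc:
    print(exc)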
[ 853.080664] env[62109]: Removing descriptor: 16 [ 853.142852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.804s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.143484] env[62109]: ERROR nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Traceback (most recent call last): [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.driver.spawn(context, instance, image_meta, [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] vm_ref = self.build_virtual_machine(instance, [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.143484] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] for vif in network_info: [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self._sync_wrapper(fn, *args, **kwargs) [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.wait() [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: 
afc5587e-7fd5-4b07-aff8-98ef8358985f] self[:] = self._gt.wait() [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self._exit_event.wait() [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] result = hub.switch() [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 853.143816] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return self.greenlet.switch() [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] result = function(*args, **kwargs) [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] return func(*args, **kwargs) [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise e [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] nwinfo = self.network_api.allocate_for_instance( [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] created_port_ids = self._update_ports_for_instance( [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] with excutils.save_and_reraise_exception(): [ 853.144180] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] self.force_reraise() [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise self.value [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] updated_port = self._update_port( [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] _ensure_no_port_binding_failure(port) [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] raise exception.PortBindingFailed(port_id=port['id']) [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] nova.exception.PortBindingFailed: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. [ 853.144542] env[62109]: ERROR nova.compute.manager [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] [ 853.144850] env[62109]: DEBUG nova.compute.utils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 853.146095] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Build of instance afc5587e-7fd5-4b07-aff8-98ef8358985f was re-scheduled: Binding failed for port 2be45127-dad8-4c66-98a1-3f9c554f3a2e, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 853.146514] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 853.146735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.146876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquired lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.147265] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.147701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg a99d06bc051943c9af022d36c558474e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.148930] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.117s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.150750] env[62109]: INFO nova.compute.claims [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.152088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg f55c24a394ed41e5bb1fcb1fda085ef5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.154331] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a99d06bc051943c9af022d36c558474e [ 853.185474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f55c24a394ed41e5bb1fcb1fda085ef5 [ 853.187313] env[62109]: INFO nova.compute.manager [-] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Took 1.02 seconds to deallocate network for instance. 
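The "Claim successful on node domain-c8..." line above follows the inventory repeatedly reported for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. As a worked example of how that inventory turns into schedulable capacity, assuming placement's usual capacity = (total - reserved) * allocation_ratio rule (the formula is an assumption here; the numbers are taken from the log):

# Inventory as logged for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def capacity(inv):
    # Assumed rule: usable capacity per resource class is
    # (total - reserved) * allocation_ratio.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}


print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

Under that rule the m1.nano claims in this run (1 vCPU, 192 MB RAM, 1 GB disk each) consume only a small fraction of the provider, which is consistent with the claim succeeding here while the builds fail later on port binding rather than on resources.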
[ 853.189392] env[62109]: DEBUG nova.compute.claims [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 853.189575] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.363901] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.391048] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.391301] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.391454] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.391634] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.391779] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.391925] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.392153] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.392314] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.392477] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.392636] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.392804] env[62109]: DEBUG nova.virt.hardware [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.393666] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5def2d-3c58-4282-9554-e988acc9c67f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.402948] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4694b588-19fa-4f7d-bf03-c26f7fb279dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.420609] env[62109]: ERROR nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. 
[ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Traceback (most recent call last): [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] yield resources [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.driver.spawn(context, instance, image_meta, [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] vm_ref = self.build_virtual_machine(instance, [ 853.420609] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] for vif in network_info: [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return self._sync_wrapper(fn, *args, **kwargs) [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.wait() [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self[:] = self._gt.wait() [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return self._exit_event.wait() [ 853.421005] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.421005] env[62109]: ERROR 
nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] current.throw(*self._exc) [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] result = function(*args, **kwargs) [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return func(*args, **kwargs) [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise e [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] nwinfo = self.network_api.allocate_for_instance( [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] created_port_ids = self._update_ports_for_instance( [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] with excutils.save_and_reraise_exception(): [ 853.421372] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.force_reraise() [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise self.value [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] updated_port = self._update_port( [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] _ensure_no_port_binding_failure(port) [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise exception.PortBindingFailed(port_id=port['id']) [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. [ 853.421738] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] [ 853.421738] env[62109]: INFO nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Terminating instance [ 853.423419] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.423572] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.423736] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.424187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9f5789358c2544e898b1c6fd892fc8e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.430825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f5789358c2544e898b1c6fd892fc8e7 [ 853.503093] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] Releasing lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.503093] env[62109]: DEBUG nova.compute.manager [req-ec77d0d4-21e5-4a19-aa43-acf978870a39 req-cd0eedb9-ce91-48ac-97aa-59f638e79b59 service nova] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Received event network-vif-deleted-e599aa10-a1f1-41b4-933c-2956e0b7e627 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 853.655462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 284f7483cbc24661be207c40d9c1d4f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.663697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 284f7483cbc24661be207c40d9c1d4f7 [ 853.669508] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 
tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.761007] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.761561] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg cb9137f7502445c3b654f5979442b9b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 853.769538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb9137f7502445c3b654f5979442b9b6 [ 853.942011] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.019956] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.020585] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0897f9368ed44e5c9fad77081d92ad66 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.028555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0897f9368ed44e5c9fad77081d92ad66 [ 854.264035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Releasing lock "refresh_cache-afc5587e-7fd5-4b07-aff8-98ef8358985f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.264291] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 854.264435] env[62109]: DEBUG nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 854.264595] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.279904] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.280755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg aa6b179b4c4547e5a337c87acf4bf43b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.287517] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa6b179b4c4547e5a337c87acf4bf43b [ 854.404552] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad51541-56e6-4a2e-a653-8798dd330816 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.409871] env[62109]: DEBUG nova.compute.manager [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Received event network-changed-4708138f-6f39-4fbc-8800-010c527d3c13 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 854.410062] env[62109]: DEBUG nova.compute.manager [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Refreshing instance network info cache due to event network-changed-4708138f-6f39-4fbc-8800-010c527d3c13. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 854.410252] env[62109]: DEBUG oslo_concurrency.lockutils [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] Acquiring lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.413245] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19083f8c-e88f-417b-b00d-76049b652cfc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.442069] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5058f9-e346-4b66-87ad-ec70f1b95150 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.448669] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b20c4a-0d35-45b4-b139-48bc0df36778 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.461500] env[62109]: DEBUG nova.compute.provider_tree [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.461973] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 4189d8d2977b4864b4763c6670d18770 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.468695] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4189d8d2977b4864b4763c6670d18770 [ 854.523442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.523442] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 854.523442] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.523442] env[62109]: DEBUG oslo_concurrency.lockutils [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] Acquired lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.523638] env[62109]: DEBUG nova.network.neutron [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Refreshing network info cache for port 4708138f-6f39-4fbc-8800-010c527d3c13 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 854.533016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] Expecting reply to msg 661d547bb5c74f4dbbba1b494f8bd8c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.533016] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e8ed73e-3969-48e8-9e7a-f8eed85b858d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.535308] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a621e162-9471-4ef1-b012-3de0ef9cc69d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.544479] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 661d547bb5c74f4dbbba1b494f8bd8c6 [ 854.558098] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6163fcd4-cfe4-4432-ba8d-665319fa11ed could not be found. [ 854.558317] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.558486] env[62109]: INFO nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Took 0.04 seconds to destroy the instance on the hypervisor. [ 854.558716] env[62109]: DEBUG oslo.service.loopingcall [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.558916] env[62109]: DEBUG nova.compute.manager [-] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 854.559116] env[62109]: DEBUG nova.network.neutron [-] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.576406] env[62109]: DEBUG nova.network.neutron [-] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.576881] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e1af544df72c40c38ac913a5f33859d5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.586076] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1af544df72c40c38ac913a5f33859d5 [ 854.785121] env[62109]: DEBUG nova.network.neutron [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.785686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg e69a3709bd9744249bdff0b534ded39b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.794498] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e69a3709bd9744249bdff0b534ded39b [ 854.964401] env[62109]: DEBUG nova.scheduler.client.report [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.966737] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg d3338aa5697b4c26b1cfab3d71d479e1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 854.981546] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3338aa5697b4c26b1cfab3d71d479e1 [ 855.042873] env[62109]: DEBUG nova.network.neutron [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.078460] env[62109]: DEBUG nova.network.neutron [-] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.078897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c007c9c4d7c64310aee1e65c5ca3fe26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.088269] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c007c9c4d7c64310aee1e65c5ca3fe26 [ 855.133123] env[62109]: DEBUG nova.network.neutron [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.133630] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] Expecting reply to msg 57f4d97bd9224b46865af9f9fc300796 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.148647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57f4d97bd9224b46865af9f9fc300796 [ 855.288093] env[62109]: INFO nova.compute.manager [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: afc5587e-7fd5-4b07-aff8-98ef8358985f] Took 1.02 seconds to deallocate network for instance. [ 855.290004] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 5f56de14cea440098be3a5de80a5f207 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.324638] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f56de14cea440098be3a5de80a5f207 [ 855.469681] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.470228] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.471926] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg c64e459078794fc3a3c39d270529c5c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.473059] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.457s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.474731] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg b633859931054cd68bebfca7cde54c6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.506110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c64e459078794fc3a3c39d270529c5c2 [ 855.509713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b633859931054cd68bebfca7cde54c6d [ 855.580966] env[62109]: INFO nova.compute.manager [-] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Took 1.02 seconds to deallocate network for instance. [ 855.584579] env[62109]: DEBUG nova.compute.claims [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 855.584720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.635600] env[62109]: DEBUG oslo_concurrency.lockutils [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] Releasing lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.635885] env[62109]: DEBUG nova.compute.manager [req-86b8fda3-e593-4664-a7da-cf0ef9249898 req-3d97c956-1d02-49b5-8ed8-433fff7d07c5 service nova] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Received event network-vif-deleted-4708138f-6f39-4fbc-8800-010c527d3c13 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 855.794840] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 6ba6af9fe0484138be6ab358fa3c6b13 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.830562] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ba6af9fe0484138be6ab358fa3c6b13 [ 855.978023] env[62109]: DEBUG nova.compute.utils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 
tempest-ServerMetadataTestJSON-985696139-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.978705] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg c0eac83f1e1e4809ac7234eb17d6b9df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 855.987208] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 855.987208] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 855.989691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0eac83f1e1e4809ac7234eb17d6b9df [ 856.024673] env[62109]: DEBUG nova.policy [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26e0d6bc5c7b451abae6add4088b1c62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68c6abcd904e419c828b564f5cd78bc5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.210281] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5132d887-2bf8-4add-92b0-5a8a9b403353 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.219335] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a978cc38-dabb-4972-9bba-2fd525687492 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.252037] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42096480-443b-4706-944a-2c2da2959d5d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.259580] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96941c98-5d8d-449e-ae38-38f5ca1641d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.274810] env[62109]: DEBUG nova.compute.provider_tree [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
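Port 906773bb-0ccc-45dc-9288-5601845118c0, created above, is the one whose binding fails a few entries later. The tracebacks in this section all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure (line 294), which turns a Neutron port whose binding could not be completed into a PortBindingFailed exception. A simplified, self-contained sketch of that check (not the exact Nova code, which uses nova.exception and nova.network.model):

class PortBindingFailed(Exception):
    # Stands in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with binding:vif_type
    # 'binding_failed'; Nova refuses to build VIF info for such a port.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


try:
    ensure_no_port_binding_failure(
        {'id': '906773bb-0ccc-45dc-9288-5601845118c0',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)

Because the failure only surfaces while the VIF list is iterated during spawn, the compute manager then aborts the claim, deallocates the network and re-schedules the build, which is the sequence visible for instances 6163fcd4 and 47b83dbe in this section.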
[ 856.275397] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 419c6ba2e71e4f31afb932e369a0792f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 856.283306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 419c6ba2e71e4f31afb932e369a0792f [ 856.319582] env[62109]: INFO nova.scheduler.client.report [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Deleted allocations for instance afc5587e-7fd5-4b07-aff8-98ef8358985f [ 856.327197] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg c6b71fad38164e759a8a7120a561d786 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 856.348025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6b71fad38164e759a8a7120a561d786 [ 856.359788] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Successfully created port: 906773bb-0ccc-45dc-9288-5601845118c0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.484395] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.486191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg c791a47e680846e1947c8b1567651971 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 856.516544] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c791a47e680846e1947c8b1567651971 [ 856.777684] env[62109]: DEBUG nova.scheduler.client.report [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.780357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 4dfe2fda38da4cf6a975a86c87feb240 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 856.792839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dfe2fda38da4cf6a975a86c87feb240 [ 856.842559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f54d10c6-4e90-4c0d-be44-3717a1211a85 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "afc5587e-7fd5-4b07-aff8-98ef8358985f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 162.630s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.843196] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 8d653b03889b4283a160b1e746311fd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 856.855601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d653b03889b4283a160b1e746311fd3 [ 856.991485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 2e4d6aee851b4963bb6f0cdf0ea55b1d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.024051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e4d6aee851b4963bb6f0cdf0ea55b1d [ 857.250522] env[62109]: DEBUG nova.compute.manager [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Received event network-changed-906773bb-0ccc-45dc-9288-5601845118c0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 857.250735] env[62109]: DEBUG nova.compute.manager [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 
req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Refreshing instance network info cache due to event network-changed-906773bb-0ccc-45dc-9288-5601845118c0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 857.250954] env[62109]: DEBUG oslo_concurrency.lockutils [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] Acquiring lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.251101] env[62109]: DEBUG oslo_concurrency.lockutils [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] Acquired lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.251268] env[62109]: DEBUG nova.network.neutron [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Refreshing network info cache for port 906773bb-0ccc-45dc-9288-5601845118c0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 857.251688] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] Expecting reply to msg 11aa80686aa94a4ba842e14c315367da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.258132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11aa80686aa94a4ba842e14c315367da [ 857.282831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.810s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.283442] env[62109]: ERROR nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. 
[ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Traceback (most recent call last): [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.driver.spawn(context, instance, image_meta, [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] vm_ref = self.build_virtual_machine(instance, [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.283442] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] for vif in network_info: [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return self._sync_wrapper(fn, *args, **kwargs) [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.wait() [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self[:] = self._gt.wait() [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return self._exit_event.wait() [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] current.throw(*self._exc) [ 857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
857.283821] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] result = function(*args, **kwargs) [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] return func(*args, **kwargs) [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise e [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] nwinfo = self.network_api.allocate_for_instance( [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] created_port_ids = self._update_ports_for_instance( [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] with excutils.save_and_reraise_exception(): [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] self.force_reraise() [ 857.284187] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise self.value [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] updated_port = self._update_port( [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] _ensure_no_port_binding_failure(port) [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] raise exception.PortBindingFailed(port_id=port['id']) [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] nova.exception.PortBindingFailed: Binding failed for 
port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. [ 857.284515] env[62109]: ERROR nova.compute.manager [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] [ 857.284515] env[62109]: DEBUG nova.compute.utils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.285333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.062s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.287072] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg d5940607c4e44687b21f4024bc5e58d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.288731] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Build of instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 was re-scheduled: Binding failed for port ae7e19e9-1d82-47c1-96f4-06020432387b, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 857.289180] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 857.289398] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquiring lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.289543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Acquired lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.289700] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.290102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg f8eb33666d934940b1ce68a85f293cf8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.295559] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8eb33666d934940b1ce68a85f293cf8 [ 857.323325] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5940607c4e44687b21f4024bc5e58d7 [ 857.345003] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 857.347021] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 553497cef79c468aa8fc8dc8d0fbef7a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.381215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 553497cef79c468aa8fc8dc8d0fbef7a [ 857.441981] env[62109]: ERROR nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. 
[ 857.441981] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.441981] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.441981] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.441981] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.441981] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.441981] env[62109]: ERROR nova.compute.manager raise self.value [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.441981] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 857.441981] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.441981] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 857.442605] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.442605] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 857.442605] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. 
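Annotation: the traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a port whose Neutron binding failed into nova.exception.PortBindingFailed; save_and_reraise_exception in _update_ports_for_instance then propagates it back up to _allocate_network_async. A minimal, self-contained sketch of that check, reconstructed only from the traceback (the 'binding_failed' vif-type marker is an assumption, not something shown in this log):

# Hedged sketch, not Nova's actual code: the helper and exception names come
# from the traceback above; the 'binding:vif_type' == 'binding_failed' test is
# an assumption about how Neutron reports a failed binding on the port.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Reject ports whose binding Neutron could not complete.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example using the port id from the log entry above.
port = {'id': '906773bb-0ccc-45dc-9288-5601845118c0',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 906773bb-..., please check neutron logs ...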
[ 857.442605] env[62109]: ERROR nova.compute.manager [ 857.442605] env[62109]: Traceback (most recent call last): [ 857.442605] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 857.442605] env[62109]: listener.cb(fileno) [ 857.442605] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.442605] env[62109]: result = function(*args, **kwargs) [ 857.442605] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.442605] env[62109]: return func(*args, **kwargs) [ 857.442605] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.442605] env[62109]: raise e [ 857.442605] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.442605] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 857.442605] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.442605] env[62109]: created_port_ids = self._update_ports_for_instance( [ 857.442605] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.442605] env[62109]: with excutils.save_and_reraise_exception(): [ 857.442605] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.442605] env[62109]: self.force_reraise() [ 857.442605] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.442605] env[62109]: raise self.value [ 857.442605] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.442605] env[62109]: updated_port = self._update_port( [ 857.442605] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.442605] env[62109]: _ensure_no_port_binding_failure(port) [ 857.442605] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.442605] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 857.443329] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. [ 857.443329] env[62109]: Removing descriptor: 16 [ 857.499624] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.525476] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 857.525757] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 857.525878] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.526123] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 857.526303] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.526454] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 857.526666] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 857.526827] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 857.526993] env[62109]: DEBUG nova.virt.hardware [None 
req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 857.527156] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 857.527328] env[62109]: DEBUG nova.virt.hardware [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 857.528242] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de086d22-b98c-469e-b6ee-330ec0b188b2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.537519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870f48eb-6afe-4ab0-ae4e-dbc5c2f5dab7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.551160] env[62109]: ERROR nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. 
[ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Traceback (most recent call last): [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] yield resources [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.driver.spawn(context, instance, image_meta, [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] vm_ref = self.build_virtual_machine(instance, [ 857.551160] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] for vif in network_info: [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return self._sync_wrapper(fn, *args, **kwargs) [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.wait() [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self[:] = self._gt.wait() [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return self._exit_event.wait() [ 857.551541] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 857.551541] env[62109]: ERROR 
nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] current.throw(*self._exc) [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] result = function(*args, **kwargs) [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return func(*args, **kwargs) [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise e [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] nwinfo = self.network_api.allocate_for_instance( [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] created_port_ids = self._update_ports_for_instance( [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] with excutils.save_and_reraise_exception(): [ 857.552023] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.force_reraise() [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise self.value [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] updated_port = self._update_port( [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] _ensure_no_port_binding_failure(port) [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise exception.PortBindingFailed(port_id=port['id']) [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. [ 857.552396] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] [ 857.552396] env[62109]: INFO nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Terminating instance [ 857.553799] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquiring lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.783647] env[62109]: DEBUG nova.network.neutron [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.808607] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.863489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.894435] env[62109]: DEBUG nova.network.neutron [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.894938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] Expecting reply to msg 82668b8b922d40c78de7f3d2c573f266 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 857.896721] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.897123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 9f6480f6561646e88326632ab4c5db7e in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 857.903668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82668b8b922d40c78de7f3d2c573f266 [ 857.905212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f6480f6561646e88326632ab4c5db7e [ 858.031627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceb1e99-70f7-496a-a570-47a3d7529b11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.039292] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ea4f38-d2f5-4dd4-8c21-7a49e4782af4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.069585] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfcc432-bdbd-4c23-bd7e-dd359c1c1c05 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.077057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566aef49-53c2-44d9-8a84-036fa3d5958d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.089982] env[62109]: DEBUG nova.compute.provider_tree [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.090535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 3291855fc88c4f258b427456b037f4f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 858.097379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3291855fc88c4f258b427456b037f4f3 [ 858.397860] env[62109]: DEBUG oslo_concurrency.lockutils [req-9906f5b3-280c-46cd-a2a7-565475c3ed05 req-9e3fbc5a-a259-4daf-8ade-f1ec3973bb12 service nova] Releasing lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.398203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquired lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.398203] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.399007] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 
30d7d76e398649c1971ac18e1abe81bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 858.399669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Releasing lock "refresh_cache-47b83dbe-d7d8-4875-bb79-95a8fecf4028" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.399882] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 858.400048] env[62109]: DEBUG nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.400239] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.405467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30d7d76e398649c1971ac18e1abe81bb [ 858.428618] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.428618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg b5abe03c9c5444549584c2859aa598fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 858.435146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5abe03c9c5444549584c2859aa598fe [ 858.593610] env[62109]: DEBUG nova.scheduler.client.report [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.596168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 9f3b1eb7e9bc40f0b20794d736ea9da0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 858.607481] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f3b1eb7e9bc40f0b20794d736ea9da0 [ 858.919710] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.929281] env[62109]: DEBUG nova.network.neutron [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.929868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 9a83b2a8984441a3b933b059c1750aee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 858.938593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a83b2a8984441a3b933b059c1750aee [ 859.008557] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.009279] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 5fd8d8f23a054493ba99f42f3120fff9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.017205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fd8d8f23a054493ba99f42f3120fff9 [ 859.098647] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.813s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.099303] env[62109]: ERROR nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. 
[ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Traceback (most recent call last): [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.driver.spawn(context, instance, image_meta, [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] vm_ref = self.build_virtual_machine(instance, [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] vif_infos = vmwarevif.get_vif_info(self._session, [ 859.099303] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] for vif in network_info: [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return self._sync_wrapper(fn, *args, **kwargs) [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.wait() [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self[:] = self._gt.wait() [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return self._exit_event.wait() [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] current.throw(*self._exc) [ 859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
859.099661] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] result = function(*args, **kwargs) [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] return func(*args, **kwargs) [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise e [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] nwinfo = self.network_api.allocate_for_instance( [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] created_port_ids = self._update_ports_for_instance( [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] with excutils.save_and_reraise_exception(): [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] self.force_reraise() [ 859.100016] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise self.value [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] updated_port = self._update_port( [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] _ensure_no_port_binding_failure(port) [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] raise exception.PortBindingFailed(port_id=port['id']) [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] nova.exception.PortBindingFailed: Binding failed for 
port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. [ 859.100399] env[62109]: ERROR nova.compute.manager [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] [ 859.100399] env[62109]: DEBUG nova.compute.utils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 859.101274] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.016s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.102722] env[62109]: INFO nova.compute.claims [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.104264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 7c4ec822c8614a8e9c731d8a019d69a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.106115] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Build of instance aa1afca5-8194-4a9d-bcd0-e3e91c15338c was re-scheduled: Binding failed for port f608a2e6-985c-4133-bb07-9b62be09d8d9, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 859.106543] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 859.106769] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.106914] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.107072] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 859.107464] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 19507a61cb114c0d85120520824e0b51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.114171] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19507a61cb114c0d85120520824e0b51 [ 859.139234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c4ec822c8614a8e9c731d8a019d69a5 [ 859.348347] env[62109]: DEBUG nova.compute.manager [req-f0b2bb5a-be81-4d15-b49d-d48c1f8e0a21 req-0743fff1-3a1a-4a34-a8cb-be92c23616fa service nova] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Received event network-vif-deleted-906773bb-0ccc-45dc-9288-5601845118c0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 859.432451] env[62109]: INFO nova.compute.manager [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] [instance: 47b83dbe-d7d8-4875-bb79-95a8fecf4028] Took 1.03 seconds to deallocate network for instance. 
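Annotation: the "Inventory has not changed" records above report the provider's resources as VCPU total=48 with allocation_ratio 4.0, MEMORY_MB total=196590 with 512 reserved, and DISK_GB total=400. A rough worked example of the schedulable capacity that implies, assuming the usual Placement convention capacity = (total - reserved) * allocation_ratio (the formula is an assumption; the figures are copied from the log):

# Sketch only: capacity math for the inventory reported in this log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400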
[ 859.434307] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 26e220cf958e4df0a6d4e328443b7652 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.468287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26e220cf958e4df0a6d4e328443b7652 [ 859.511643] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Releasing lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.512102] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 859.512305] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 859.512604] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c51958c-189e-4782-afba-5353a9a88b08 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.521283] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34856fcd-90da-4eb5-8a1d-8a4bd675774e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.556584] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 252b7e84-4f91-4078-a81c-392d622b6ce2 could not be found. [ 859.556878] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 859.557082] env[62109]: INFO nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 859.557399] env[62109]: DEBUG oslo.service.loopingcall [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.557686] env[62109]: DEBUG nova.compute.manager [-] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 859.557809] env[62109]: DEBUG nova.network.neutron [-] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 859.576410] env[62109]: DEBUG nova.network.neutron [-] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 859.577097] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 66af16bad24549229c75951fddc6112a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.587840] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66af16bad24549229c75951fddc6112a [ 859.610773] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 5fb8cc1e1f054197a2be3a3f8b59f2be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.618831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fb8cc1e1f054197a2be3a3f8b59f2be [ 859.630522] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 859.719767] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.720810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 35a9224ef4d54847b70d2da9d12386d5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.728473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35a9224ef4d54847b70d2da9d12386d5 [ 859.939105] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 2708ff96acc3402491dd74eb13d0df2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 859.972436] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2708ff96acc3402491dd74eb13d0df2d [ 859.978228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "d851f6a6-07aa-4e64-a007-8a42a8ae9c42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.979216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "d851f6a6-07aa-4e64-a007-8a42a8ae9c42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.081259] env[62109]: DEBUG nova.network.neutron [-] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.081259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dbcd7e0b5585406d914582185750e8f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.089327] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbcd7e0b5585406d914582185750e8f2 [ 860.224714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-aa1afca5-8194-4a9d-bcd0-e3e91c15338c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.225284] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be 
unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 860.225800] env[62109]: DEBUG nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 860.226353] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 860.242516] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.243729] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 56ad0e2fe27f4197bd3f51f2785eabe8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.253315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56ad0e2fe27f4197bd3f51f2785eabe8 [ 860.413435] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c375b13-18fe-4045-af5f-26dd27bf0531 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.421861] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d9af88-e29a-4bd2-ba13-00b4e0c997a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.457053] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf5dcae-d551-4add-a646-c01688a20a82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.464623] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737cb059-a003-4155-8fa7-c14fd7e555ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.479190] env[62109]: DEBUG nova.compute.provider_tree [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.479860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 6b07cd34ea674729a945f416f389e31f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.481508] env[62109]: INFO nova.scheduler.client.report [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 
tempest-ServerActionsTestOtherA-442155103-project-member] Deleted allocations for instance 47b83dbe-d7d8-4875-bb79-95a8fecf4028 [ 860.487673] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b07cd34ea674729a945f416f389e31f [ 860.488472] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Expecting reply to msg 47e16823778a423a835963bf893aa2c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.502571] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47e16823778a423a835963bf893aa2c9 [ 860.583101] env[62109]: INFO nova.compute.manager [-] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Took 1.03 seconds to deallocate network for instance. [ 860.585693] env[62109]: DEBUG nova.compute.claims [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 860.586034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.748464] env[62109]: DEBUG nova.network.neutron [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.749300] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg b7da09656ff9466ba0092a35ecf4a116 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.757852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7da09656ff9466ba0092a35ecf4a116 [ 860.988288] env[62109]: DEBUG nova.scheduler.client.report [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.990863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg e95c8d08f8d842b0afda92351bac7af8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 860.993585] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-9bb53392-410e-4565-9829-8cba0afd3a53 tempest-ServerActionsTestOtherA-442155103 tempest-ServerActionsTestOtherA-442155103-project-member] Lock "47b83dbe-d7d8-4875-bb79-95a8fecf4028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.279s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.994234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ff021d13e5364b0c9c9fd20f2b68eb91 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.005801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff021d13e5364b0c9c9fd20f2b68eb91 [ 861.006941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e95c8d08f8d842b0afda92351bac7af8 [ 861.251599] env[62109]: INFO nova.compute.manager [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: aa1afca5-8194-4a9d-bcd0-e3e91c15338c] Took 1.03 seconds to deallocate network for instance. [ 861.253500] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg e6996523597c4202a33a48ca21448410 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.290686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6996523597c4202a33a48ca21448410 [ 861.493848] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.494395] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 861.497032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 85176454a9cc4e479d56e029c80367d5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.504372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.036s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.504372] env[62109]: DEBUG nova.objects.instance [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: 900e1e1e-5635-4782-bd87-046dd2af7dad] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 861.504372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 11988f9222134b198cd23003b77e013a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.504372] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.507500] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a458406e87f24d729fc8f79fb1195abe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.531415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11988f9222134b198cd23003b77e013a [ 861.557668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85176454a9cc4e479d56e029c80367d5 [ 861.566316] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a458406e87f24d729fc8f79fb1195abe [ 861.757864] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 4354dc35e158411dbff024496f880323 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 861.786465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4354dc35e158411dbff024496f880323 [ 862.014386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg f36f64ac933c45d1a7c5b8ef9f69e0f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.020715] env[62109]: DEBUG nova.compute.utils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.020715] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 16087c4d5b3d4230bd9e14d385f15fe1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.020715] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.020715] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 862.023834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f36f64ac933c45d1a7c5b8ef9f69e0f5 [ 862.031128] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16087c4d5b3d4230bd9e14d385f15fe1 [ 862.040144] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.084913] env[62109]: DEBUG nova.policy [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4ea22bb21004f69a2b27d306493db45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '423f777bec3c474a91970fce3e308097', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 862.283337] env[62109]: INFO nova.scheduler.client.report [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Deleted allocations for instance aa1afca5-8194-4a9d-bcd0-e3e91c15338c [ 862.296049] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg c21218581fea4841ac24dd650acd1823 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.310378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c21218581fea4841ac24dd650acd1823 [ 862.521773] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.522188] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d3984e34-1f38-49e7-8018-ddd55561e9fe tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 71a0d48360b94addbbb54f16d8b6ac30 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.523089] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.512s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.524876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 8591fdb991914fa2ac60b9078f344d08 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.526284] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 862.528062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 912c3a46e61840b386c908dc79670ef0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.551265] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71a0d48360b94addbbb54f16d8b6ac30 [ 862.573187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8591fdb991914fa2ac60b9078f344d08 [ 862.578744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 912c3a46e61840b386c908dc79670ef0 [ 862.615132] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Successfully created port: 364d95c6-b690-4798-ac6c-92324d6dbd83 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.797438] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d7571335-9041-4af4-97e0-a0db7879f77e tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "aa1afca5-8194-4a9d-bcd0-e3e91c15338c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.746s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.798071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg ce277095f96641bf87d1eeff3b7ab401 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 862.809720] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce277095f96641bf87d1eeff3b7ab401 [ 863.035600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 3c0f13027863414bb864143a033b1c6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 863.071905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c0f13027863414bb864143a033b1c6e [ 863.300478] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 
309a7bae-82f5-4b9e-ac86-e0f1803f2585] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.302340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 7159b8c9bb094e98bb9cc3e658387f8e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 863.304950] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d412bb-1e14-44a0-b40e-ead322be1cb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.313605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab041ca-f13f-4b2d-9d20-faaa9010ae70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.347696] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfa0259-b6ba-4341-af25-27668ae3517c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.356840] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aff7796-ca8e-4757-9e7b-e34cbfcd9896 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.372624] env[62109]: DEBUG nova.compute.provider_tree [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.373161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg d3ed83b240e247ef895f62ba6bab668e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 863.374331] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7159b8c9bb094e98bb9cc3e658387f8e [ 863.383118] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3ed83b240e247ef895f62ba6bab668e [ 863.546592] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 863.575092] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.575343] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.575498] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.575679] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.575822] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.576020] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.576247] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.576406] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.576567] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.576728] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.576895] env[62109]: DEBUG nova.virt.hardware [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.577739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49a0784-0229-43dc-b6b3-41c2f8b110ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.585273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b147ed-6495-4aa2-a85d-6326a9d41aee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.642499] env[62109]: DEBUG nova.compute.manager [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Received event network-changed-364d95c6-b690-4798-ac6c-92324d6dbd83 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 863.642692] env[62109]: DEBUG nova.compute.manager [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Refreshing instance network info cache due to event network-changed-364d95c6-b690-4798-ac6c-92324d6dbd83. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 863.642917] env[62109]: DEBUG oslo_concurrency.lockutils [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] Acquiring lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.643053] env[62109]: DEBUG oslo_concurrency.lockutils [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] Acquired lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.643209] env[62109]: DEBUG nova.network.neutron [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Refreshing network info cache for port 364d95c6-b690-4798-ac6c-92324d6dbd83 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 863.643663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] Expecting reply to msg cffce2976038461aad91160aeb80f36e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 863.651067] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cffce2976038461aad91160aeb80f36e [ 863.826400] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.838371] env[62109]: ERROR nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. 
[ 863.838371] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.838371] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.838371] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.838371] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.838371] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.838371] env[62109]: ERROR nova.compute.manager raise self.value [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.838371] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 863.838371] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.838371] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 863.838874] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.838874] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 863.838874] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. 
[ 863.838874] env[62109]: ERROR nova.compute.manager [ 863.838874] env[62109]: Traceback (most recent call last): [ 863.838874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 863.838874] env[62109]: listener.cb(fileno) [ 863.838874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 863.838874] env[62109]: result = function(*args, **kwargs) [ 863.838874] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.838874] env[62109]: return func(*args, **kwargs) [ 863.838874] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 863.838874] env[62109]: raise e [ 863.838874] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.838874] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 863.838874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.838874] env[62109]: created_port_ids = self._update_ports_for_instance( [ 863.838874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.838874] env[62109]: with excutils.save_and_reraise_exception(): [ 863.838874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.838874] env[62109]: self.force_reraise() [ 863.838874] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.838874] env[62109]: raise self.value [ 863.838874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.838874] env[62109]: updated_port = self._update_port( [ 863.838874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.838874] env[62109]: _ensure_no_port_binding_failure(port) [ 863.838874] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.838874] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 863.839768] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. [ 863.839768] env[62109]: Removing descriptor: 19 [ 863.839768] env[62109]: ERROR nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. 
[ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Traceback (most recent call last): [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] yield resources [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.driver.spawn(context, instance, image_meta, [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 863.839768] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] vm_ref = self.build_virtual_machine(instance, [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] for vif in network_info: [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self._sync_wrapper(fn, *args, **kwargs) [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.wait() [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self[:] = self._gt.wait() [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self._exit_event.wait() [ 863.840146] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 863.840859] env[62109]: ERROR 
nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] result = hub.switch() [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self.greenlet.switch() [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] result = function(*args, **kwargs) [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return func(*args, **kwargs) [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise e [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] nwinfo = self.network_api.allocate_for_instance( [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.840859] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] created_port_ids = self._update_ports_for_instance( [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] with excutils.save_and_reraise_exception(): [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.force_reraise() [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise self.value [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] updated_port = self._update_port( [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.841312] 
env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] _ensure_no_port_binding_failure(port) [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.841312] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise exception.PortBindingFailed(port_id=port['id']) [ 863.841615] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. [ 863.841615] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] [ 863.841615] env[62109]: INFO nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Terminating instance [ 863.842983] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.876493] env[62109]: DEBUG nova.scheduler.client.report [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 863.878944] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg acdaad27bce243c186f4b65813f2d5fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 863.891817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acdaad27bce243c186f4b65813f2d5fb [ 863.951432] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "55da10ab-e116-4ead-90ff-c82fffb2dcc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.951662] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "55da10ab-e116-4ead-90ff-c82fffb2dcc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.161825] env[62109]: DEBUG nova.network.neutron [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.265967] env[62109]: DEBUG nova.network.neutron [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.266522] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] Expecting reply to msg 0a96cdcab4354d77b3e39a9190ea1234 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 864.275031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a96cdcab4354d77b3e39a9190ea1234 [ 864.382434] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.859s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.383032] env[62109]: ERROR nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
[ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Traceback (most recent call last): [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.driver.spawn(context, instance, image_meta, [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] vm_ref = self.build_virtual_machine(instance, [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 864.383032] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] for vif in network_info: [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return self._sync_wrapper(fn, *args, **kwargs) [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.wait() [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self[:] = self._gt.wait() [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return self._exit_event.wait() [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] current.throw(*self._exc) [ 864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
864.383368] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] result = function(*args, **kwargs) [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] return func(*args, **kwargs) [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise e [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] nwinfo = self.network_api.allocate_for_instance( [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] created_port_ids = self._update_ports_for_instance( [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] with excutils.save_and_reraise_exception(): [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] self.force_reraise() [ 864.383733] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise self.value [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] updated_port = self._update_port( [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] _ensure_no_port_binding_failure(port) [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] raise exception.PortBindingFailed(port_id=port['id']) [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] nova.exception.PortBindingFailed: Binding failed for 
port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. [ 864.384244] env[62109]: ERROR nova.compute.manager [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] [ 864.384244] env[62109]: DEBUG nova.compute.utils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 864.385110] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.810s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.385334] env[62109]: DEBUG nova.objects.instance [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lazy-loading 'resources' on Instance uuid 900e1e1e-5635-4782-bd87-046dd2af7dad {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.385685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg fa0fde364e764739b417215ca16f048a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 864.386846] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Build of instance 732cf1e3-823d-4769-ad16-f5b492be53d5 was re-scheduled: Binding failed for port caac7c6a-04c0-41de-8a4c-52c979260348, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 864.387301] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 864.387522] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquiring lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.387668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Acquired lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.387824] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.388287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg e54bab6fe4124ec0901587b9ec31c88e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 864.392526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa0fde364e764739b417215ca16f048a [ 864.396413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54bab6fe4124ec0901587b9ec31c88e [ 864.769416] env[62109]: DEBUG oslo_concurrency.lockutils [req-73839647-2f81-4e43-b917-5bf8842d7bd3 req-3dc390e2-68eb-4d11-ac07-fd2d6f48d6bf service nova] Releasing lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.769416] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.769416] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.769882] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg cd75a6923c5d403e86a98187ac23263f in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 864.778192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd75a6923c5d403e86a98187ac23263f [ 864.908398] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.988271] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.988869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg aff4f3e1aedb44699f851d78a536376a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 864.998590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aff4f3e1aedb44699f851d78a536376a [ 865.128963] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d5ea52-a9b3-4761-9bb9-57a2816f3712 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.137310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c413d84-3178-4bb7-937a-4fe9b3cd38f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.166715] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc3cbb0-8d12-4542-96d3-74a80fe19857 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.174349] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136abae0-0088-466a-91d0-0a67a9294c0a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.188597] env[62109]: DEBUG nova.compute.provider_tree [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.189228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg db0fcba1c1bf4f9f84a6bf3f882d843e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 865.196130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db0fcba1c1bf4f9f84a6bf3f882d843e [ 865.287277] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 
3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 865.361482] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.362249] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 7a4092882ef34045af4f78a560aeda9d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 865.396454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a4092882ef34045af4f78a560aeda9d [ 865.491669] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Releasing lock "refresh_cache-732cf1e3-823d-4769-ad16-f5b492be53d5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.491989] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 865.492321] env[62109]: DEBUG nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 865.492542] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 865.508650] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 865.509279] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 4d1bc7bd000e4dd1915179509c4f459d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 865.516920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d1bc7bd000e4dd1915179509c4f459d [ 865.686698] env[62109]: DEBUG nova.compute.manager [req-c05c9f1a-2556-4f0c-ba1c-5e6cded2698f req-4d8e22e3-270f-4116-9b06-2888a14b49e4 service nova] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Received event network-vif-deleted-364d95c6-b690-4798-ac6c-92324d6dbd83 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 865.692844] env[62109]: DEBUG nova.scheduler.client.report [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 865.695220] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 3af5000b6fd54fe9878faadebde33a6e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 865.706514] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3af5000b6fd54fe9878faadebde33a6e [ 865.864895] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.865311] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 865.865509] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 865.865801] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3ea6f3b-77a1-48e6-a8ba-d9fb9a832163 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.874620] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eacb8d-754d-4c3f-a1c4-003f6be91707 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.896417] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e could not be found. [ 865.896614] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 865.896791] env[62109]: INFO nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 865.897294] env[62109]: DEBUG oslo.service.loopingcall [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.897518] env[62109]: DEBUG nova.compute.manager [-] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 865.897612] env[62109]: DEBUG nova.network.neutron [-] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 865.913383] env[62109]: DEBUG nova.network.neutron [-] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 865.913911] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg aaa73f4cbc8a459fac35e847b3bef446 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 865.921236] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaa73f4cbc8a459fac35e847b3bef446 [ 866.011959] env[62109]: DEBUG nova.network.neutron [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.012529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 7cb528c9a7344de894462634a7e38ad6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.020693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cb528c9a7344de894462634a7e38ad6 [ 866.198349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.200824] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.593s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.202741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 25e9ed1976b445c483bca942e3357ae7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.217342] env[62109]: INFO nova.scheduler.client.report [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleted allocations for instance 900e1e1e-5635-4782-bd87-046dd2af7dad [ 866.220849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 093a20920adb43c2bb8338f739885a2f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.242544] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25e9ed1976b445c483bca942e3357ae7 [ 866.259897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 093a20920adb43c2bb8338f739885a2f [ 866.416352] env[62109]: DEBUG nova.network.neutron [-] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 866.416835] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bf37df8a8e6142ee80798e5b447c20ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.424519] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf37df8a8e6142ee80798e5b447c20ba [ 866.515247] env[62109]: INFO nova.compute.manager [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] [instance: 732cf1e3-823d-4769-ad16-f5b492be53d5] Took 1.02 seconds to deallocate network for instance. [ 866.517166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg 18eedd4143cd4a94b1df938d48d1612d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.548106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18eedd4143cd4a94b1df938d48d1612d [ 866.732977] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "900e1e1e-5635-4782-bd87-046dd2af7dad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.460s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.733511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c66a8db4-a924-4609-b7f1-8d4fe40b048e tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg c7629028162c40c6a81ec2ff96fbbb36 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.754403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7629028162c40c6a81ec2ff96fbbb36 [ 866.919288] env[62109]: INFO nova.compute.manager [-] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Took 1.02 seconds to deallocate network for instance. 
[ 866.922337] env[62109]: DEBUG nova.compute.claims [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 866.922552] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.928698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 21770bf8a67847f1a88c07131611e84a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 866.938112] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21770bf8a67847f1a88c07131611e84a [ 866.939958] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476b0eb8-c5f6-4175-84cf-e359ca1b5029 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.948212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ca1a9e-a493-43b1-9744-dac233400505 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.978213] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45407a8f-96db-4586-8e93-46bd56f2ca87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.985349] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b8e864-033c-4fa5-8be7-7ab4c75157be {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.999763] env[62109]: DEBUG nova.compute.provider_tree [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.000302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 6d842a338cd24481b74374f4f887ccf8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.006904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d842a338cd24481b74374f4f887ccf8 [ 867.021614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg f609f77477654610b6586924116d3760 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.050156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f609f77477654610b6586924116d3760 [ 867.239653] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad 
tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 7525f2e8cd414e8897eeabaa70a10c39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.248470] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7525f2e8cd414e8897eeabaa70a10c39 [ 867.502938] env[62109]: DEBUG nova.scheduler.client.report [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.505395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 6bb24cf6648f4b4c93f20765b5d291d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.519379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bb24cf6648f4b4c93f20765b5d291d7 [ 867.542845] env[62109]: INFO nova.scheduler.client.report [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Deleted allocations for instance 732cf1e3-823d-4769-ad16-f5b492be53d5 [ 867.553036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Expecting reply to msg ee748ffbeb854927984eb336c103dda7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.562904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee748ffbeb854927984eb336c103dda7 [ 867.741959] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "b95c60dc-50c4-4afc-acb0-3308e490b808" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.742200] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.742410] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "b95c60dc-50c4-4afc-acb0-3308e490b808-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.742593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.742771] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.744921] env[62109]: INFO nova.compute.manager [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Terminating instance [ 867.746559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.746713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquired lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.746876] env[62109]: DEBUG nova.network.neutron [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 867.747402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 7dd81f82de374053a2d8419c51ef22e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 867.753091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd81f82de374053a2d8419c51ef22e2 [ 868.015987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.816s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.016679] env[62109]: ERROR nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] 
Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Traceback (most recent call last): [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.driver.spawn(context, instance, image_meta, [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] vm_ref = self.build_virtual_machine(instance, [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] vif_infos = vmwarevif.get_vif_info(self._session, [ 868.016679] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] for vif in network_info: [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self._sync_wrapper(fn, *args, **kwargs) [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.wait() [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self[:] = self._gt.wait() [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self._exit_event.wait() [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] result = hub.switch() [ 868.017021] env[62109]: ERROR 
nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 868.017021] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return self.greenlet.switch() [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] result = function(*args, **kwargs) [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] return func(*args, **kwargs) [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise e [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] nwinfo = self.network_api.allocate_for_instance( [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] created_port_ids = self._update_ports_for_instance( [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] with excutils.save_and_reraise_exception(): [ 868.017388] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] self.force_reraise() [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise self.value [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] updated_port = self._update_port( [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] _ensure_no_port_binding_failure(port) 
[ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] raise exception.PortBindingFailed(port_id=port['id']) [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] nova.exception.PortBindingFailed: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. [ 868.017768] env[62109]: ERROR nova.compute.manager [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] [ 868.018089] env[62109]: DEBUG nova.compute.utils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 868.018692] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.418s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.020736] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 9d4b82575ffe43d38080d9e4244fabe4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.029729] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Build of instance 436788b9-92bb-4088-9c24-c2e9a073c09d was re-scheduled: Binding failed for port e91962f5-3d5c-4d08-af11-4df2df76c337, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 868.032350] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 868.032621] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquiring lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.032770] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Acquired lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.032925] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 868.033365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 63f671fe2eba41acbef7f773b8101a10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.042666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63f671fe2eba41acbef7f773b8101a10 [ 868.063549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d4b82575ffe43d38080d9e4244fabe4 [ 868.064151] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6c419abe-124e-4c6d-b61e-1f19380fe58a tempest-ServerRescueTestJSONUnderV235-376371306 tempest-ServerRescueTestJSONUnderV235-376371306-project-member] Lock "732cf1e3-823d-4769-ad16-f5b492be53d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.795s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.064685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 6e470042753d47b49901d8152bd26bf9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.077525] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e470042753d47b49901d8152bd26bf9 [ 868.264204] env[62109]: DEBUG nova.network.neutron [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 868.330489] env[62109]: DEBUG nova.network.neutron [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.330886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 101780bb5653450c934a5ac22dc50d6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.339212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 101780bb5653450c934a5ac22dc50d6a [ 868.553765] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 868.567388] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.570774] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg c1b846f5b05749c896a4c09ae434e030 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.618553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1b846f5b05749c896a4c09ae434e030 [ 868.644238] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.644745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg eab9408a1932484f9e17b759a8f7791f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.652419] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eab9408a1932484f9e17b759a8f7791f [ 868.773160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7f20d3-1ab1-4c13-a477-62016c70f55e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.782573] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7784a0-69e4-477b-871c-97493492a113 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.812616] env[62109]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7460caf-fa7e-4a2a-b335-3881524e1f07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.819760] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc53eb6f-b597-4d7b-b461-5d865feef531 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.834224] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Releasing lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.834652] env[62109]: DEBUG nova.compute.manager [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 868.834841] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 868.835295] env[62109]: DEBUG nova.compute.provider_tree [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.835767] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 89bdd682d3634a7ab2b32d161c826a60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 868.840713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36436989-a229-4bdf-86b4-5c4f08cb5723 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.850729] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 868.851312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89bdd682d3634a7ab2b32d161c826a60 [ 868.851699] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88315103-5bcb-4968-b6e0-99f307789ae4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.858220] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 868.858220] 
env[62109]: value = "task-401494" [ 868.858220] env[62109]: _type = "Task" [ 868.858220] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.866290] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.090714] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.146927] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Releasing lock "refresh_cache-436788b9-92bb-4088-9c24-c2e9a073c09d" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.147160] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 869.147343] env[62109]: DEBUG nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 869.147510] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 869.168230] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.168775] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 22de1bddcc8b47939684d04510c5f86b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.175242] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22de1bddcc8b47939684d04510c5f86b [ 869.341128] env[62109]: DEBUG nova.scheduler.client.report [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.343690] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 7425e73d47614a7d9c1f21981bbcc9f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.357601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7425e73d47614a7d9c1f21981bbcc9f3 [ 869.368607] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401494, 'name': PowerOffVM_Task, 'duration_secs': 0.109724} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.368867] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 869.369070] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 869.369330] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74c82684-ada1-48e7-8935-50c786cf3b24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.396464] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 869.396776] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 869.397008] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleting the datastore file [datastore1] b95c60dc-50c4-4afc-acb0-3308e490b808 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.397387] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d5ef3c8-ec0d-4ea4-bf3d-fb089d2169d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.405356] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for the task: (returnval){ [ 869.405356] env[62109]: value = "task-401496" [ 869.405356] env[62109]: _type = "Task" [ 869.405356] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.412887] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401496, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.670837] env[62109]: DEBUG nova.network.neutron [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.671386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg f0a1e26bf21c4f4da0507b9406548857 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.679517] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0a1e26bf21c4f4da0507b9406548857 [ 869.847235] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.828s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.847235] env[62109]: ERROR nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. 
[ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Traceback (most recent call last): [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.driver.spawn(context, instance, image_meta, [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 869.847235] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] vm_ref = self.build_virtual_machine(instance, [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] for vif in network_info: [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return self._sync_wrapper(fn, *args, **kwargs) [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.wait() [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self[:] = self._gt.wait() [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return self._exit_event.wait() [ 869.847552] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] current.throw(*self._exc) [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] result = function(*args, **kwargs) [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] return func(*args, **kwargs) [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise e [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] nwinfo = self.network_api.allocate_for_instance( [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] created_port_ids = self._update_ports_for_instance( [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 869.847863] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] with excutils.save_and_reraise_exception(): [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] self.force_reraise() [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise self.value [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] updated_port = self._update_port( [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] _ensure_no_port_binding_failure(port) [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] raise exception.PortBindingFailed(port_id=port['id']) [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] nova.exception.PortBindingFailed: Binding failed for 
port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. [ 869.848212] env[62109]: ERROR nova.compute.manager [instance: 53d6d89d-04bb-421d-994c-014830491dfa] [ 869.848562] env[62109]: DEBUG nova.compute.utils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 869.849188] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.660s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.851333] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 86fce54593cd48f18a5352063fb6ff56 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.854975] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Build of instance 53d6d89d-04bb-421d-994c-014830491dfa was re-scheduled: Binding failed for port fcae7974-3037-4637-9e78-4b3d7df2d667, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 869.855430] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 869.855659] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquiring lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.855805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Acquired lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.855965] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 869.856373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 3c186fe7965044fbbd2937994bcd6217 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.863482] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c186fe7965044fbbd2937994bcd6217 [ 869.888832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86fce54593cd48f18a5352063fb6ff56 [ 869.916800] env[62109]: DEBUG oslo_vmware.api [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Task: {'id': task-401496, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084346} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.918416] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.918416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 869.918416] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 869.918416] env[62109]: INFO nova.compute.manager [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Took 1.08 seconds to destroy the instance on the hypervisor. [ 869.918416] env[62109]: DEBUG oslo.service.loopingcall [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.918864] env[62109]: DEBUG nova.compute.manager [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 869.918864] env[62109]: DEBUG nova.network.neutron [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 869.943589] env[62109]: DEBUG nova.network.neutron [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.944319] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 99ed56b157f74c058b52cf9ce5a2dd20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 869.955756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ed56b157f74c058b52cf9ce5a2dd20 [ 870.173978] env[62109]: INFO nova.compute.manager [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] [instance: 436788b9-92bb-4088-9c24-c2e9a073c09d] Took 1.03 seconds to deallocate network for instance. 
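Editor's note on the traceback above: the build of instance 53d6d89d-04bb-421d-994c-014830491dfa bottoms out in nova.network.neutron._ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises PortBindingFailed for port fcae7974-3037-4637-9e78-4b3d7df2d667 after the port update. A minimal sketch of that kind of guard is shown below; the class and function are illustrative, not Nova's actual source, and the assumption that Neutron flags a failed binding via the port's 'binding:vif_type' attribute being set to 'binding_failed' is mine, not something visible in this log.

# Illustrative sketch only: a guard that turns a failed Neutron binding into an
# exception so the build is aborted and re-scheduled, mirroring the traceback above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported that it could not bind the port (assumed convention)."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# usage (hypothetical): port = neutron.show_port(port_id)['port']
#                       ensure_no_port_binding_failure(port)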
[ 870.175639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 0c20e84afe46435da305a2948a708e73 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.218486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c20e84afe46435da305a2948a708e73 [ 870.376523] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.446230] env[62109]: DEBUG nova.network.neutron [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.446664] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8202ae5234b8413baa2b66c66a4723c3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.455663] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.456257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 8f9cb44ec6dd40448c56478589a666d1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.457238] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8202ae5234b8413baa2b66c66a4723c3 [ 870.464920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f9cb44ec6dd40448c56478589a666d1 [ 870.543164] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6516082-8ded-4bc6-9382-ea6e9d1e21db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.551001] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8935b7-ce37-4f17-8596-a0f240c6f3cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.582146] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b6dfae-61af-4c9d-a3e5-5014c8cb15fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.589108] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8ae36b-5b26-443e-9168-4613b626e8fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.602011] env[62109]: DEBUG nova.compute.provider_tree [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not 
changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.602512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 6875ead75b384474a5d502c13dbbfdd0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.609086] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6875ead75b384474a5d502c13dbbfdd0 [ 870.679779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg a18e8162d8b94f63995de1118dda3c2b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.712152] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a18e8162d8b94f63995de1118dda3c2b [ 870.949304] env[62109]: INFO nova.compute.manager [-] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Took 1.03 seconds to deallocate network for instance. [ 870.953251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg cb6f9cbf650a4bbeb5dc427bc40fa555 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.958135] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Releasing lock "refresh_cache-53d6d89d-04bb-421d-994c-014830491dfa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.958364] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 870.958546] env[62109]: DEBUG nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 870.958709] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 870.973457] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.974114] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 51aafe0d43154185a951f6ab9dc7da0a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 870.978886] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb6f9cbf650a4bbeb5dc427bc40fa555 [ 870.980896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51aafe0d43154185a951f6ab9dc7da0a [ 871.104763] env[62109]: DEBUG nova.scheduler.client.report [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.107175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 53a9f08a8846464d94de5567b2043b7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.121587] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53a9f08a8846464d94de5567b2043b7d [ 871.200031] env[62109]: INFO nova.scheduler.client.report [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Deleted allocations for instance 436788b9-92bb-4088-9c24-c2e9a073c09d [ 871.205667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Expecting reply to msg 1516f5001dc14cfcb598c5899897a658 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.221187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1516f5001dc14cfcb598c5899897a658 [ 871.456494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.476095] env[62109]: DEBUG nova.network.neutron [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.476621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 
tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 24e2220ba93c4b1198105725cf62e9cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.484355] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24e2220ba93c4b1198105725cf62e9cc [ 871.610778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.761s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.611446] env[62109]: ERROR nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Traceback (most recent call last): [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self.driver.spawn(context, instance, image_meta, [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] vm_ref = self.build_virtual_machine(instance, [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 871.611446] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] for vif in network_info: [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return self._sync_wrapper(fn, *args, **kwargs) [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 
3ada5090-7219-4835-b508-2188501ae5e4] self.wait() [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self[:] = self._gt.wait() [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return self._exit_event.wait() [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] current.throw(*self._exc) [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 871.611816] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] result = function(*args, **kwargs) [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] return func(*args, **kwargs) [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise e [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] nwinfo = self.network_api.allocate_for_instance( [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] created_port_ids = self._update_ports_for_instance( [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] with excutils.save_and_reraise_exception(): [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] self.force_reraise() [ 871.612231] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.612539] env[62109]: ERROR nova.compute.manager 
[instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise self.value [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] updated_port = self._update_port( [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] _ensure_no_port_binding_failure(port) [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] raise exception.PortBindingFailed(port_id=port['id']) [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] nova.exception.PortBindingFailed: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. [ 871.612539] env[62109]: ERROR nova.compute.manager [instance: 3ada5090-7219-4835-b508-2188501ae5e4] [ 871.612539] env[62109]: DEBUG nova.compute.utils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 871.613523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.029s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.615194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f54c9ae8651c4dccb2e351afbc5e449a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.616422] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Build of instance 3ada5090-7219-4835-b508-2188501ae5e4 was re-scheduled: Binding failed for port e599aa10-a1f1-41b4-933c-2956e0b7e627, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 871.616870] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 871.617093] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.617240] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.617390] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 871.617744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 0bc0281fcb9f41ed852640eb16006b0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.625250] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bc0281fcb9f41ed852640eb16006b0b [ 871.646466] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f54c9ae8651c4dccb2e351afbc5e449a [ 871.707674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-cf2a0747-5816-4e46-b6db-fa085d393fe4 tempest-ServerMetadataNegativeTestJSON-911262689 tempest-ServerMetadataNegativeTestJSON-911262689-project-member] Lock "436788b9-92bb-4088-9c24-c2e9a073c09d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 157.096s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.708278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg c01df0ccc6334d6098acbd9101a49950 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 871.717770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c01df0ccc6334d6098acbd9101a49950 [ 871.979066] env[62109]: INFO nova.compute.manager [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] [instance: 53d6d89d-04bb-421d-994c-014830491dfa] Took 1.02 seconds to deallocate network for instance. 
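Editor's note on the "Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e" records above: they include the full inventory the resource tracker reports to Placement. As a quick worked example of what those numbers mean, assuming Placement's usual capacity rule of (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation (the snippet below is illustrative, not Nova code; the inventory dict is copied from the log):

# Inventory copied from the log records above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Schedulable capacity per resource class under the assumed Placement rule.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-allocation max={inv['max_unit']}")

# Expected output:
# VCPU: capacity=192, per-allocation max=16
# MEMORY_MB: capacity=196078, per-allocation max=65530
# DISK_GB: capacity=400, per-allocation max=124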
[ 871.980968] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 1307027b1bec4668a27e11d06814013f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.014343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1307027b1bec4668a27e11d06814013f [ 872.140677] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 872.210672] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 872.212405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9e663ae6d4e0462191b5a32fc7d15860 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.227843] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.228381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg a03f9186d3d249b4be650737f61ac852 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.236971] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a03f9186d3d249b4be650737f61ac852 [ 872.248210] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e663ae6d4e0462191b5a32fc7d15860 [ 872.300141] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb720110-a021-4a18-9bcd-21e9947fc24d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.307632] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a88542-d720-456d-8693-a6803b4803ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.338103] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e74bd0-2031-4828-b501-cda33988a4fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.344690] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beeae0ae-526f-4b40-a347-fb3c7dd381a2 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.357390] env[62109]: DEBUG nova.compute.provider_tree [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.357889] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9d062e66edf54bf7926ca7bceaa5ffd7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.365403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d062e66edf54bf7926ca7bceaa5ffd7 [ 872.487864] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg 273b41eaabd94717a3abb669b9840842 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.527360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 273b41eaabd94717a3abb669b9840842 [ 872.728696] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.732353] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-3ada5090-7219-4835-b508-2188501ae5e4" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.732611] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 872.732810] env[62109]: DEBUG nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 872.732979] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 872.749244] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 872.749801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg efaa7ce3226e4170b73889b2ad0bbde5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.757770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efaa7ce3226e4170b73889b2ad0bbde5 [ 872.860455] env[62109]: DEBUG nova.scheduler.client.report [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.862881] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg bfe4efe82eb248f9abc5cdd227d40f4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 872.874619] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe4efe82eb248f9abc5cdd227d40f4b [ 873.012033] env[62109]: INFO nova.scheduler.client.report [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Deleted allocations for instance 53d6d89d-04bb-421d-994c-014830491dfa [ 873.024497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Expecting reply to msg a5cce889d0c24cc8a165953c6afc4e86 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.041569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5cce889d0c24cc8a165953c6afc4e86 [ 873.251842] env[62109]: DEBUG nova.network.neutron [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.252411] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 5ac547cfc1fd4a3bb018d0f554ee7c65 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.260735] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ac547cfc1fd4a3bb018d0f554ee7c65 [ 873.365790] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.752s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.366551] env[62109]: ERROR nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Traceback (most recent call last): [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.driver.spawn(context, instance, image_meta, [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] vm_ref = self.build_virtual_machine(instance, [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 873.366551] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] for vif in network_info: [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return self._sync_wrapper(fn, *args, **kwargs) [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.wait() [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self[:] = self._gt.wait() [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 873.366897] env[62109]: ERROR nova.compute.manager 
[instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return self._exit_event.wait() [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] current.throw(*self._exc) [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 873.366897] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] result = function(*args, **kwargs) [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] return func(*args, **kwargs) [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise e [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] nwinfo = self.network_api.allocate_for_instance( [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] created_port_ids = self._update_ports_for_instance( [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] with excutils.save_and_reraise_exception(): [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] self.force_reraise() [ 873.367209] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise self.value [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] updated_port = self._update_port( [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 873.367518] env[62109]: ERROR 
nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] _ensure_no_port_binding_failure(port) [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] raise exception.PortBindingFailed(port_id=port['id']) [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] nova.exception.PortBindingFailed: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. [ 873.367518] env[62109]: ERROR nova.compute.manager [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] [ 873.367518] env[62109]: DEBUG nova.compute.utils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 873.368625] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.505s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.370414] env[62109]: INFO nova.compute.claims [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.372161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg c3a8877dd39d489db56697c6777a5391 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.378942] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Build of instance 6163fcd4-cfe4-4432-ba8d-665319fa11ed was re-scheduled: Binding failed for port 4708138f-6f39-4fbc-8800-010c527d3c13, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 873.378942] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 873.378942] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.378942] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.378942] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 873.379161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 974dbf3defc54eea9fa8a783803792ae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.384393] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 974dbf3defc54eea9fa8a783803792ae [ 873.418152] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3a8877dd39d489db56697c6777a5391 [ 873.527629] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1a371740-4d2c-404d-9660-d265d4a3da45 tempest-ServerActionsTestJSON-1289460322 tempest-ServerActionsTestJSON-1289460322-project-member] Lock "53d6d89d-04bb-421d-994c-014830491dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.217s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.527629] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 169803befc2b4ae7a1827e1367071903 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.537748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 169803befc2b4ae7a1827e1367071903 [ 873.759242] env[62109]: INFO nova.compute.manager [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 3ada5090-7219-4835-b508-2188501ae5e4] Took 1.02 seconds to deallocate network for instance. 
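Editor's note: each PortBindingFailed in this section follows the same sequence in the log: the claim is aborted under the "compute_resources" lock, the network is deallocated, allocations are deleted from Placement, and the build is re-scheduled. The sketch below is a simplified illustration of that control flow under the assumption of a conductor-level retry; every name in it is illustrative and none of it is Nova's actual implementation.

# Illustrative sketch of the failure path the log keeps repeating.
class PortBindingFailed(Exception):
    """Same role as the exception sketched after the first traceback above."""

class RescheduledException(Exception):
    """Signal that the build should be retried on another host."""

def build_and_run_instance(instance, driver, network_api, resource_tracker,
                           placement):
    # All of these collaborator objects and method names are hypothetical.
    claim = resource_tracker.instance_claim(instance)    # takes "compute_resources"
    try:
        network_info = network_api.allocate_for_instance(instance)
        driver.spawn(instance, network_info)
    except PortBindingFailed as exc:
        claim.abort()                                     # "abort_instance_claim"
        network_api.deallocate_for_instance(instance)     # "Deallocating network..."
        placement.delete_allocations(instance)            # "Deleted allocations..."
        # Re-raise in a form the conductor understands as "pick another host".
        raise RescheduledException(str(exc)) from exc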
[ 873.759242] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 13e1317bb70e4b678e056d9eb6a9480f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.800149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13e1317bb70e4b678e056d9eb6a9480f [ 873.877748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 6d384285ea4b445aa2b20b8332a74b8e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 873.886534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d384285ea4b445aa2b20b8332a74b8e [ 873.935973] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.029665] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 874.031942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 8e66f65a3c09459ea22ff2642b8bd9e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.068824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e66f65a3c09459ea22ff2642b8bd9e0 [ 874.103649] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.103649] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4957b330078d44b68c495860a3a7c2a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.112103] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4957b330078d44b68c495860a3a7c2a1 [ 874.264089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 4deb80a6c86d42959c65f63cd92476c6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.321226] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4deb80a6c86d42959c65f63cd92476c6 [ 874.553720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.555897] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6630ee17-206a-46ff-8d5f-9da72daca729 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.563727] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88397ea9-b061-45d8-a7ae-1f56af4aa069 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.594272] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bffdce9-d2f4-423c-86b7-48df0acb0983 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.601315] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9bbd41-c0bd-48a0-84fb-53619b1c9a91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.613886] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-6163fcd4-cfe4-4432-ba8d-665319fa11ed" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.614104] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 874.614278] env[62109]: DEBUG nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 874.614437] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 874.616219] env[62109]: DEBUG nova.compute.provider_tree [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.616686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg c9d49510f629442ab919e1cdd29dabfd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.625286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9d49510f629442ab919e1cdd29dabfd [ 874.630374] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.630925] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a6b6b8e7dbab4c9c923dd0c4d4ee1ce6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.638064] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6b6b8e7dbab4c9c923dd0c4d4ee1ce6 [ 874.789966] env[62109]: INFO nova.scheduler.client.report [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Deleted allocations for instance 3ada5090-7219-4835-b508-2188501ae5e4 [ 874.796342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 65cb8baa1e8f49e6b530fe2cf0a3b3da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 874.810609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65cb8baa1e8f49e6b530fe2cf0a3b3da [ 875.119393] env[62109]: DEBUG nova.scheduler.client.report [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.122281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 91fabfe0818c4c2ebe17aef8ed90d521 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.132903] env[62109]: DEBUG nova.network.neutron [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.133950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 32b6905cd1464b9292551b7c51b82fbe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.145069] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91fabfe0818c4c2ebe17aef8ed90d521 [ 875.146341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32b6905cd1464b9292551b7c51b82fbe [ 875.298802] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5dc46801-54e7-4816-8481-80ed741cbbc9 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "3ada5090-7219-4835-b508-2188501ae5e4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.771s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.299462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 3d3246b978ef4865941e507dd3edcdce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.312565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d3246b978ef4865941e507dd3edcdce [ 875.624712] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.625099] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 875.626949] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg e2a5137f065c4352ac1a2e1a1e195474 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.628123] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.042s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.630200] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 0751b9a9731840958e282736e5627e3d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.636871] env[62109]: INFO nova.compute.manager [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 6163fcd4-cfe4-4432-ba8d-665319fa11ed] Took 1.02 seconds to deallocate network for instance. 
[ 875.638527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 750cdf15ac4944c3844ce01400e40b98 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.663212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0751b9a9731840958e282736e5627e3d [ 875.680286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a5137f065c4352ac1a2e1a1e195474 [ 875.687684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 750cdf15ac4944c3844ce01400e40b98 [ 875.801891] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 875.803697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg c797ee67d3d64573a7af48a62ae18477 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 875.851942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c797ee67d3d64573a7af48a62ae18477 [ 876.141206] env[62109]: DEBUG nova.compute.utils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.141206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg d6a4e954391645a0bbd0fb449bbaf8ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 876.144506] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 876.144506] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 876.149948] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ad5c9e237ecd479f8ceb70b264b87162 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 876.159564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6a4e954391645a0bbd0fb449bbaf8ca [ 876.182339] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad5c9e237ecd479f8ceb70b264b87162 [ 876.271217] env[62109]: DEBUG nova.policy [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20e2b6e1fa34439587b2b0fc42646c1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7b7a7806e84f1ebca2207ff836ce46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 876.321803] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.352166] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25d11b7-bd42-454d-b4f7-71489f1a3204 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.359939] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125484c4-f409-4e3b-9f29-f4a13d4be70c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.393623] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1cbcc2-0e6e-4d43-bc72-9e17d71e26a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.414635] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d263196-645e-49e5-90f2-1f16ae628309 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.430779] env[62109]: DEBUG nova.compute.provider_tree [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Inventory has not changed in ProviderTree for provider: 
5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.431374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg af2e65903a384afbb8c33634ecaeecd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 876.439625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af2e65903a384afbb8c33634ecaeecd2 [ 876.644504] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 876.646445] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 7baf4ba97b9640b9b54382f89a0def22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 876.678546] env[62109]: INFO nova.scheduler.client.report [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance 6163fcd4-cfe4-4432-ba8d-665319fa11ed [ 876.685553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 58cb295994a544a79278a98b4b7e7a29 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 876.695232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7baf4ba97b9640b9b54382f89a0def22 [ 876.701811] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58cb295994a544a79278a98b4b7e7a29 [ 876.847170] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Successfully created port: b79d0813-93ae-42da-a060-e3a3b0e18d63 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.936989] env[62109]: DEBUG nova.scheduler.client.report [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.939736] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg abc92895ca344b78b0ec8a441337de59 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 876.966834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abc92895ca344b78b0ec8a441337de59 [ 877.151093] env[62109]: INFO nova.virt.block_device [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Booting with volume 9435a93d-7799-4ec6-91c6-ecbe7ea15abc at /dev/sda [ 877.187982] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fc11f9d3-77d4-4557-afb7-a5d4edf5e862 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "6163fcd4-cfe4-4432-ba8d-665319fa11ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.860s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.188603] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 4f5c92e699bb47bf87610003f4f58401 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.197323] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a40b7b4d-ffe0-47a6-aa5b-f5d36b0b723b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.203012] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5c92e699bb47bf87610003f4f58401 [ 877.208953] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a1f1a4-83db-49b5-8cdd-978e57556b06 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.234404] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acf9a167-3e21-4bc3-a7c2-22ab49503fc2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.246897] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48979fc3-1f2b-4837-97e3-0ce440e41117 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.272190] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e4e1aa-9f3d-4eb3-81b6-c4bf23d1bbf6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.278497] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff8b5ec-9ce0-4464-af05-bb856b832a7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.291817] env[62109]: DEBUG nova.virt.block_device [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating existing volume attachment record: 075519bd-337c-41bc-86d2-2aba2978da8d {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 877.442877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b 
tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.815s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.443545] env[62109]: ERROR nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Traceback (most recent call last): [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.driver.spawn(context, instance, image_meta, [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] vm_ref = self.build_virtual_machine(instance, [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] vif_infos = vmwarevif.get_vif_info(self._session, [ 877.443545] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] for vif in network_info: [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return self._sync_wrapper(fn, *args, **kwargs) [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.wait() [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self[:] = self._gt.wait() [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return self._exit_event.wait() [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] current.throw(*self._exc) [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 877.443881] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] result = function(*args, **kwargs) [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] return func(*args, **kwargs) [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise e [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] nwinfo = self.network_api.allocate_for_instance( [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] created_port_ids = self._update_ports_for_instance( [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] with excutils.save_and_reraise_exception(): [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] self.force_reraise() [ 877.444238] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise self.value [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] updated_port = self._update_port( [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 
252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] _ensure_no_port_binding_failure(port) [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] raise exception.PortBindingFailed(port_id=port['id']) [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] nova.exception.PortBindingFailed: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. [ 877.444569] env[62109]: ERROR nova.compute.manager [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] [ 877.444569] env[62109]: DEBUG nova.compute.utils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 877.445618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.406s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.447222] env[62109]: INFO nova.compute.claims [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.448860] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 45e9839640a846d3bb3b02389a144fef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.450121] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Build of instance 252b7e84-4f91-4078-a81c-392d622b6ce2 was re-scheduled: Binding failed for port 906773bb-0ccc-45dc-9288-5601845118c0, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 877.450687] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 877.450915] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquiring lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.451057] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Acquired lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.451208] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 877.451564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 16aeb0a27aba48e8941ccf6e53d12e3b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.457545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16aeb0a27aba48e8941ccf6e53d12e3b [ 877.490082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45e9839640a846d3bb3b02389a144fef [ 877.690546] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 877.692321] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 1f6f5ace42704f56bc2bcbfa9eb08dd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.736702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f6f5ace42704f56bc2bcbfa9eb08dd3 [ 877.874327] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 33771ea87a784bf0aee1f7be664e4615 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.888810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33771ea87a784bf0aee1f7be664e4615 [ 877.925146] env[62109]: DEBUG nova.compute.manager [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Received event network-changed-b79d0813-93ae-42da-a060-e3a3b0e18d63 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 877.925347] env[62109]: DEBUG nova.compute.manager [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Refreshing instance network info cache due to event network-changed-b79d0813-93ae-42da-a060-e3a3b0e18d63. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 877.925556] env[62109]: DEBUG oslo_concurrency.lockutils [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] Acquiring lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.925691] env[62109]: DEBUG oslo_concurrency.lockutils [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] Acquired lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.925850] env[62109]: DEBUG nova.network.neutron [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Refreshing network info cache for port b79d0813-93ae-42da-a060-e3a3b0e18d63 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 877.926350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] Expecting reply to msg 61a090603cd34671a6d1dd669dcbac49 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.933087] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a090603cd34671a6d1dd669dcbac49 [ 877.954607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d8426a3b986e47f08d65f7bd405e8f9f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 877.962174] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg d8426a3b986e47f08d65f7bd405e8f9f [ 877.975403] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.029399] env[62109]: ERROR nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. [ 878.029399] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.029399] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 878.029399] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 878.029399] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.029399] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.029399] env[62109]: ERROR nova.compute.manager raise self.value [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 878.029399] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 878.029399] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.029399] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 878.030073] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.030073] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 878.030073] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. 
[ 878.030073] env[62109]: ERROR nova.compute.manager [ 878.030073] env[62109]: Traceback (most recent call last): [ 878.030073] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 878.030073] env[62109]: listener.cb(fileno) [ 878.030073] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.030073] env[62109]: result = function(*args, **kwargs) [ 878.030073] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 878.030073] env[62109]: return func(*args, **kwargs) [ 878.030073] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.030073] env[62109]: raise e [ 878.030073] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.030073] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 878.030073] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 878.030073] env[62109]: created_port_ids = self._update_ports_for_instance( [ 878.030073] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 878.030073] env[62109]: with excutils.save_and_reraise_exception(): [ 878.030073] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.030073] env[62109]: self.force_reraise() [ 878.030073] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.030073] env[62109]: raise self.value [ 878.030073] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 878.030073] env[62109]: updated_port = self._update_port( [ 878.030073] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.030073] env[62109]: _ensure_no_port_binding_failure(port) [ 878.030073] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.030073] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 878.030839] env[62109]: nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. 
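Annotation: both renderings of this traceback pass through oslo.utils' save_and_reraise_exception (the __exit__ and force_reraise frames), a context manager used inside an except block so cleanup can run and the original exception, here PortBindingFailed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, is re-raised unchanged instead of being masked by a cleanup failure. A small self-contained sketch of that pattern follows; update_port and delete_port are hypothetical helpers standing in for the Neutron calls made in _update_ports_for_instance.

from oslo_utils import excutils


class PortBindingFailed(Exception):
    pass


def update_port(port_id):
    # Hypothetical stand-in for the Neutron port update: this one port
    # fails to bind, mirroring the traceback above.
    if port_id == 'b79d0813-93ae-42da-a060-e3a3b0e18d63':
        raise PortBindingFailed(port_id)
    return port_id


def delete_port(port_id):
    # Hypothetical cleanup helper.
    print("rolling back port", port_id)


def update_ports_sketch(port_ids):
    done = []
    try:
        for port_id in port_ids:
            done.append(update_port(port_id))
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; on leaving the block the saved
            # PortBindingFailed is re-raised for the caller
            # (_allocate_network_async in the frames above).
            for port_id in done:
                delete_port(port_id)
    return done


try:
    update_ports_sketch(['port-ok', 'b79d0813-93ae-42da-a060-e3a3b0e18d63'])
except PortBindingFailed as exc:
    print("binding failed for", exc)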
[ 878.030839] env[62109]: Removing descriptor: 19 [ 878.134212] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.134212] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 3c33de9aaa5c40fb96a0f114805d0fcc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.144450] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c33de9aaa5c40fb96a0f114805d0fcc [ 878.213248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.221248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "87dff872-a469-465f-9c74-4524a2eab013" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.221488] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "87dff872-a469-465f-9c74-4524a2eab013" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.416882] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 3e794f3afcbd461f8f42cee47c2dec04 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.430976] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e794f3afcbd461f8f42cee47c2dec04 [ 878.451676] env[62109]: DEBUG nova.network.neutron [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.593722] env[62109]: DEBUG nova.network.neutron [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.594628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] Expecting reply to msg c3366f99a1cd4a4daf87e3e5dc926670 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.603930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3366f99a1cd4a4daf87e3e5dc926670 [ 878.638112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Releasing lock "refresh_cache-252b7e84-4f91-4078-a81c-392d622b6ce2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.638364] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 878.638541] env[62109]: DEBUG nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 878.638707] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 878.657066] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.657628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 178c0d618bde4379b5e845e995dfea70 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.671796] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 178c0d618bde4379b5e845e995dfea70 [ 878.674049] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f460f0b-b91b-4d8f-af5e-3325d6a805f1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.682714] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da8ba6f-a679-47eb-b057-726873044c9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.716708] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24e0f04-11cd-49b8-9df7-da541286d921 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.723960] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ac95b1-738d-4fee-be71-98f01fa505a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.741382] env[62109]: DEBUG nova.compute.provider_tree [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.741894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 90845425a7244e1db840e62861fa9d9a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.750742] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90845425a7244e1db840e62861fa9d9a [ 878.899244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "bab79bb6-1638-4eee-812d-da1372134873" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.899563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "bab79bb6-1638-4eee-812d-da1372134873" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.920984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 
tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 9220123cc76e43de9003cbf6aec6d399 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 878.953538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9220123cc76e43de9003cbf6aec6d399 [ 879.097485] env[62109]: DEBUG oslo_concurrency.lockutils [req-e5958c2e-43fb-41f8-bb54-2d21a3fa9429 req-eabb9acb-a822-4fc4-8656-d0208a354b88 service nova] Releasing lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.159776] env[62109]: DEBUG nova.network.neutron [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.160950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 38d21cfcf9df4f8c832640f2b473cbe7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.175379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38d21cfcf9df4f8c832640f2b473cbe7 [ 879.245232] env[62109]: DEBUG nova.scheduler.client.report [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.247762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg fabc36a5833143cebcc30ad26ed4e16a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.248737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquiring lock "5f58014c-e132-4fad-9ba7-bc183318200f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.249077] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "5f58014c-e132-4fad-9ba7-bc183318200f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.259945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg fabc36a5833143cebcc30ad26ed4e16a [ 879.424168] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 879.424742] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 879.424955] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 879.425107] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.425284] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 879.425424] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.425564] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 879.425762] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 879.425918] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 
tempest-ServersTestBootFromVolume-235730665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 879.426111] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 879.426278] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 879.426449] env[62109]: DEBUG nova.virt.hardware [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 879.427821] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2432fd-ae11-4f6b-8a9b-46edae2cd03d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.436200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd9bfc1-ea32-4b7e-8969-219a4cec2091 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.449389] env[62109]: ERROR nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. 
[ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Traceback (most recent call last): [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] yield resources [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.driver.spawn(context, instance, image_meta, [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] vm_ref = self.build_virtual_machine(instance, [ 879.449389] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] vif_infos = vmwarevif.get_vif_info(self._session, [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] for vif in network_info: [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return self._sync_wrapper(fn, *args, **kwargs) [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.wait() [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self[:] = self._gt.wait() [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return self._exit_event.wait() [ 879.449779] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 879.449779] env[62109]: ERROR 
nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] current.throw(*self._exc) [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] result = function(*args, **kwargs) [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return func(*args, **kwargs) [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise e [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] nwinfo = self.network_api.allocate_for_instance( [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] created_port_ids = self._update_ports_for_instance( [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] with excutils.save_and_reraise_exception(): [ 879.450125] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.force_reraise() [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise self.value [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] updated_port = self._update_port( [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] _ensure_no_port_binding_failure(port) [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise exception.PortBindingFailed(port_id=port['id']) [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. [ 879.450505] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] [ 879.450505] env[62109]: INFO nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Terminating instance [ 879.452032] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquiring lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.452303] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquired lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.452303] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.452687] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 19f2941b2cbf4341be7f8ddf41d8ed64 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.459499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19f2941b2cbf4341be7f8ddf41d8ed64 [ 879.662806] env[62109]: INFO nova.compute.manager [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] [instance: 252b7e84-4f91-4078-a81c-392d622b6ce2] Took 1.02 seconds to deallocate network for instance. 
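The PortBindingFailed traceback above bottoms out in /opt/stack/nova/nova/network/neutron.py, where _ensure_no_port_binding_failure rejects port b79d0813-93ae-42da-a060-e3a3b0e18d63. As a rough illustration only, here is a minimal Python sketch of that kind of check; it is not the Nova source, and it assumes that Neutron signals a failed binding through the port's binding:vif_type field.

    # Illustrative sketch of the check named in the traceback above
    # (_ensure_no_port_binding_failure). Assumption: a failed binding is
    # reported by Neutron as binding:vif_type == 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port that Neutron failed to bind, like the one in the error above,
    # trips the check like this:
    port = {'id': 'b79d0813-93ae-42da-a060-e3a3b0e18d63',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Seen from the compute manager, the spawn then fails exactly as logged: the async network allocation re-raises the exception, the instance is terminated, and its network and allocations are cleaned up.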
[ 879.664879] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg c02b6a5e91f44ae6b9270a175e0e9d28 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.700287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c02b6a5e91f44ae6b9270a175e0e9d28 [ 879.751201] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.751787] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 879.753512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg cc0e73282fb74937bf2da10e172c0b74 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.754559] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.928s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.755994] env[62109]: INFO nova.compute.claims [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 879.757651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 497e9e60a7cb4c7493790566cef92478 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 879.784782] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc0e73282fb74937bf2da10e172c0b74 [ 879.789653] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 497e9e60a7cb4c7493790566cef92478 [ 879.962335] env[62109]: DEBUG nova.compute.manager [req-573073bb-f39f-4b99-b8c2-3fd0f96ca603 req-ad3c6c7c-d922-40ed-ae03-e46a05f54122 service nova] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Received event network-vif-deleted-b79d0813-93ae-42da-a060-e3a3b0e18d63 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 879.969650] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.165700] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.166258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg e1dc47f1640f4a0cbd765f30bc994c6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.169918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg e11e42b620e44b249df16a189d91d1fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.174248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1dc47f1640f4a0cbd765f30bc994c6b [ 880.201019] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e11e42b620e44b249df16a189d91d1fa [ 880.261438] env[62109]: DEBUG nova.compute.utils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.262186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg edc3fd0566fb4470911c1eba563c68ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.264183] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg acf841f197884eaeb8c1ae25ed0fa8e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.264992] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 880.265204] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.281123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acf841f197884eaeb8c1ae25ed0fa8e5 [ 880.281724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc3fd0566fb4470911c1eba563c68ca [ 880.313009] env[62109]: DEBUG nova.policy [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 880.603581] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Successfully created port: 492e9847-c7bd-424b-b9ba-eed84aef6eb0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.672480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Releasing lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.673139] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 880.676684] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19148592-c7c6-42cb-b461-8956b7fd0aee {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.687038] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b178aec-71b4-4ddf-a74d-2c47cff3300c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.706077] env[62109]: INFO nova.scheduler.client.report [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Deleted allocations for instance 252b7e84-4f91-4078-a81c-392d622b6ce2 [ 880.711992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Expecting reply to msg 5efad7fec79d4ed39f613e94c383b605 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.718440] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance f453b695-8abd-44fa-8468-75c6aaeec19a could not be found. [ 880.720041] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 880.720041] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23e72a88-c501-4e74-ac66-8973f59dc307 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.726625] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f182ad-6eb6-45af-a0fe-df9aa063674c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.736897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5efad7fec79d4ed39f613e94c383b605 [ 880.747641] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f453b695-8abd-44fa-8468-75c6aaeec19a could not be found. 
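The two WARNING entries above show the teardown tolerating a VM that is already gone from the vCenter backend: InstanceNotFound is logged and cleanup simply continues. A rough sketch of that idempotent-destroy pattern follows; the helper names are invented for illustration and this is not the nova.virt.vmwareapi code.

    class InstanceNotFound(Exception):
        """The backend has no VM for the given instance UUID."""

    def destroy_on_backend(backend, instance_uuid):
        # Mirror the behaviour in the log: treat a missing backend VM as
        # already destroyed so the remaining cleanup (datastore files,
        # network, allocations) can still proceed.
        try:
            vm_ref = backend.find_vm_by_uuid(instance_uuid)  # hypothetical API
        except InstanceNotFound:
            print("Instance %s not found on backend; continuing cleanup"
                  % instance_uuid)
            return
        backend.power_off(vm_ref)  # hypothetical API
        backend.delete(vm_ref)     # hypothetical API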
[ 880.747866] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.748099] env[62109]: INFO nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Took 0.07 seconds to destroy the instance on the hypervisor. [ 880.748343] env[62109]: DEBUG oslo.service.loopingcall [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.748548] env[62109]: DEBUG nova.compute.manager [-] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.748636] env[62109]: DEBUG nova.network.neutron [-] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.767199] env[62109]: DEBUG nova.network.neutron [-] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.767684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg aa559319f4c94fbf90879830a9baabb8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.769290] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 880.771892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg e54060d79dfd4bdda3e9915cf2af8681 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 880.796343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa559319f4c94fbf90879830a9baabb8 [ 880.811679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54060d79dfd4bdda3e9915cf2af8681 [ 880.980303] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a64282b-e174-41a7-bda6-08ba688729d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.989045] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32efff14-2148-44d4-9755-ead35af323e9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.022597] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486002f9-ca61-4023-b45c-b06938aa92b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.030210] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad5c11c-ec4c-4a6b-b017-63c0fa5bf867 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.045814] env[62109]: DEBUG nova.compute.provider_tree [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.046482] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 86481f00b6784f478a6cdf255aa2c895 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.054202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86481f00b6784f478a6cdf255aa2c895 [ 881.214686] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f22245ba-14b0-41a1-a65f-5dbae505921b tempest-ServerMetadataTestJSON-985696139 tempest-ServerMetadataTestJSON-985696139-project-member] Lock "252b7e84-4f91-4078-a81c-392d622b6ce2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.816s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.216560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5eea7a41193a4170b7a8c77eadb4e013 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.228296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5eea7a41193a4170b7a8c77eadb4e013 [ 881.269502] env[62109]: DEBUG nova.network.neutron [-] 
[instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.269981] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 690e197a7d0d43f895558d3fb332af1c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.275762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 7edabaa04ccd4a0a898fddcf6c1f21c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.282343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 690e197a7d0d43f895558d3fb332af1c [ 881.312038] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7edabaa04ccd4a0a898fddcf6c1f21c4 [ 881.366369] env[62109]: DEBUG nova.compute.manager [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Received event network-changed-492e9847-c7bd-424b-b9ba-eed84aef6eb0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 881.366519] env[62109]: DEBUG nova.compute.manager [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Refreshing instance network info cache due to event network-changed-492e9847-c7bd-424b-b9ba-eed84aef6eb0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 881.366732] env[62109]: DEBUG oslo_concurrency.lockutils [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] Acquiring lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.366870] env[62109]: DEBUG oslo_concurrency.lockutils [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] Acquired lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.367023] env[62109]: DEBUG nova.network.neutron [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Refreshing network info cache for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 881.367437] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] Expecting reply to msg b92b0e8b23514d959a4031c3caa08847 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.374258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b92b0e8b23514d959a4031c3caa08847 [ 881.521031] env[62109]: ERROR nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. 
[ 881.521031] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.521031] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.521031] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.521031] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.521031] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.521031] env[62109]: ERROR nova.compute.manager raise self.value [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.521031] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 881.521031] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.521031] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 881.521569] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.521569] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 881.521569] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. 
[ 881.521569] env[62109]: ERROR nova.compute.manager [ 881.521569] env[62109]: Traceback (most recent call last): [ 881.521569] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 881.521569] env[62109]: listener.cb(fileno) [ 881.521569] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 881.521569] env[62109]: result = function(*args, **kwargs) [ 881.521569] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 881.521569] env[62109]: return func(*args, **kwargs) [ 881.521569] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 881.521569] env[62109]: raise e [ 881.521569] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.521569] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 881.521569] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.521569] env[62109]: created_port_ids = self._update_ports_for_instance( [ 881.521569] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.521569] env[62109]: with excutils.save_and_reraise_exception(): [ 881.521569] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.521569] env[62109]: self.force_reraise() [ 881.521569] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.521569] env[62109]: raise self.value [ 881.521569] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.521569] env[62109]: updated_port = self._update_port( [ 881.521569] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.521569] env[62109]: _ensure_no_port_binding_failure(port) [ 881.521569] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.521569] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 881.522309] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. 
[ 881.522309] env[62109]: Removing descriptor: 16 [ 881.556584] env[62109]: DEBUG nova.scheduler.client.report [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 881.556584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 0704a17594664f23ad8e611f11f82462 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.569469] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0704a17594664f23ad8e611f11f82462 [ 881.725810] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 881.725810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 37431886f758464db4ddf0870127f7ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.756890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37431886f758464db4ddf0870127f7ef [ 881.772680] env[62109]: INFO nova.compute.manager [-] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Took 1.02 seconds to deallocate network for instance. [ 881.778252] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 881.804669] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.805127] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.805607] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.805924] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.806181] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.806438] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.806765] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.807036] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.807354] env[62109]: DEBUG nova.virt.hardware [None 
req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.807638] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.807918] env[62109]: DEBUG nova.virt.hardware [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.808900] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0097df1-5b2e-46e6-a747-7cb33406db26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.817145] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50260651-5540-490f-9695-3390c4e9acd4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.832451] env[62109]: ERROR nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. 
[ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Traceback (most recent call last): [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] yield resources [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.driver.spawn(context, instance, image_meta, [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] vm_ref = self.build_virtual_machine(instance, [ 881.832451] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] vif_infos = vmwarevif.get_vif_info(self._session, [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] for vif in network_info: [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return self._sync_wrapper(fn, *args, **kwargs) [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.wait() [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self[:] = self._gt.wait() [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return self._exit_event.wait() [ 881.832829] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 881.832829] env[62109]: ERROR 
nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] current.throw(*self._exc) [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] result = function(*args, **kwargs) [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return func(*args, **kwargs) [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise e [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] nwinfo = self.network_api.allocate_for_instance( [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] created_port_ids = self._update_ports_for_instance( [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] with excutils.save_and_reraise_exception(): [ 881.833223] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.force_reraise() [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise self.value [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] updated_port = self._update_port( [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] _ensure_no_port_binding_failure(port) [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise exception.PortBindingFailed(port_id=port['id']) [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. [ 881.833588] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] [ 881.834610] env[62109]: INFO nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Terminating instance [ 881.836699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.883042] env[62109]: DEBUG nova.network.neutron [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 881.969833] env[62109]: DEBUG nova.network.neutron [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.970689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] Expecting reply to msg 8542dbce5152436ea346caac8d919fc8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 881.979187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8542dbce5152436ea346caac8d919fc8 [ 882.057821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.058495] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 882.060928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg ec43dced3cb84c708958307b1b3ed846 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.062057] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.139s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.063971] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 748a56c5e5a745709f979d93b8956fd7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.104419] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec43dced3cb84c708958307b1b3ed846 [ 882.106622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 748a56c5e5a745709f979d93b8956fd7 [ 882.245912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.334732] env[62109]: INFO nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Took 0.56 seconds to detach 1 volumes for instance. 
[ 882.337014] env[62109]: DEBUG nova.compute.claims [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 882.337199] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.480096] env[62109]: DEBUG oslo_concurrency.lockutils [req-647a46d1-f12e-4bf9-bc29-ec4666908ae6 req-650c4ecd-20d2-48d4-b31d-98a1bee0b71d service nova] Releasing lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.480563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.480753] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 882.481201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5e050c2c14c5475d8b5edb05140ff8da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.488274] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e050c2c14c5475d8b5edb05140ff8da [ 882.569500] env[62109]: DEBUG nova.compute.utils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 882.570218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 8ec8f98ced1d4555ad2e8df23674363c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.583370] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 882.584811] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ec8f98ced1d4555ad2e8df23674363c [ 882.794062] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd1ca26-dde2-48bf-a9d0-d7e51be168ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.802397] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0394cd52-5b71-4c73-8b26-af0779f7e10f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.805657] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.805861] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.806586] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 3aebcf2c1c664c0d9a1a34247f782a85 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.833868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3aebcf2c1c664c0d9a1a34247f782a85 [ 882.834776] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c6c790-0d32-4d62-ba4b-f593d9873d69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.842717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562e19e3-a2b5-4518-8386-bc91e0396969 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.856878] env[62109]: DEBUG nova.compute.provider_tree [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.857381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 2e4e831d849740d9a8a27be13ecfd05b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 882.867334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e4e831d849740d9a8a27be13ecfd05b [ 882.997313] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.084612] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 883.086320] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 6c2f20a59c2f4164bd591e3353b2f086 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.088090] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.088398] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ed2016fd2bbd4a6ea9390e1d50ca5c7c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.103780] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed2016fd2bbd4a6ea9390e1d50ca5c7c [ 883.120985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c2f20a59c2f4164bd591e3353b2f086 [ 883.312149] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.312149] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 883.312149] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 883.312149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 97faa555649e45c4a782bc4798ad4afd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.326070] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97faa555649e45c4a782bc4798ad4afd [ 883.359639] env[62109]: DEBUG nova.scheduler.client.report [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.362145] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 5c8986f1f80646e881ad31d337ca1a17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.375099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c8986f1f80646e881ad31d337ca1a17 [ 883.401859] env[62109]: DEBUG nova.compute.manager [req-200b3460-d633-408b-b860-ab35306cc01c req-e7fa3a90-de8b-45fe-9711-a6a9bfe8a4cb service nova] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Received event network-vif-deleted-492e9847-c7bd-424b-b9ba-eed84aef6eb0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 883.593536] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 35b0f53c3c4942b8a9bb6bbf6efe9128 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.600713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.602032] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 883.602032] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.602032] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57f70d4d-de19-4bf1-876e-e408494b1d4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.611786] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70eb0ea1-e3f6-4384-afaf-4fd562882cec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.627262] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35b0f53c3c4942b8a9bb6bbf6efe9128 [ 883.633160] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e2e09174-6ba1-44ad-ba3e-cdcae5a2d698 could not be found. 
[ 883.633355] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.633528] env[62109]: INFO nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Took 0.03 seconds to destroy the instance on the hypervisor. [ 883.633756] env[62109]: DEBUG oslo.service.loopingcall [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.633964] env[62109]: DEBUG nova.compute.manager [-] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 883.634052] env[62109]: DEBUG nova.network.neutron [-] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.649611] env[62109]: DEBUG nova.network.neutron [-] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.650097] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0b33abf5ef044375b384a6b5f53554ae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.656810] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b33abf5ef044375b384a6b5f53554ae [ 883.814874] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 883.815174] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 883.815216] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 883.843736] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.843887] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquired lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.844047] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Forcefully refreshing network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 883.844206] env[62109]: DEBUG nova.objects.instance [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lazy-loading 'info_cache' on Instance uuid b95c60dc-50c4-4afc-acb0-3308e490b808 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.844771] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 811928bb9163435281b917880bffcdd5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.854569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 811928bb9163435281b917880bffcdd5 [ 883.864510] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.802s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.865127] env[62109]: ERROR nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. 
[ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Traceback (most recent call last): [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.driver.spawn(context, instance, image_meta, [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] vm_ref = self.build_virtual_machine(instance, [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 883.865127] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] for vif in network_info: [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self._sync_wrapper(fn, *args, **kwargs) [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.wait() [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self[:] = self._gt.wait() [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self._exit_event.wait() [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] result = hub.switch() [ 883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
883.865462] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return self.greenlet.switch() [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] result = function(*args, **kwargs) [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] return func(*args, **kwargs) [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise e [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] nwinfo = self.network_api.allocate_for_instance( [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] created_port_ids = self._update_ports_for_instance( [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] with excutils.save_and_reraise_exception(): [ 883.865822] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] self.force_reraise() [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise self.value [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] updated_port = self._update_port( [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] _ensure_no_port_binding_failure(port) [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] raise exception.PortBindingFailed(port_id=port['id']) [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] nova.exception.PortBindingFailed: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. [ 883.866189] env[62109]: ERROR nova.compute.manager [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] [ 883.866513] env[62109]: DEBUG nova.compute.utils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 883.866957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.776s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.868482] env[62109]: INFO nova.compute.claims [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.870195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg c36eedd133a04cbdba797c22ce9ae040 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.871339] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Build of instance 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e was re-scheduled: Binding failed for port 364d95c6-b690-4798-ac6c-92324d6dbd83, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 883.871756] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 883.871971] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquiring lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.872128] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Acquired lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.872282] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.872633] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg aed713cf31de48d6b06679809186e7ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 883.877873] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aed713cf31de48d6b06679809186e7ef [ 883.903543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c36eedd133a04cbdba797c22ce9ae040 [ 884.097364] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 884.118271] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.118541] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.118691] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.118865] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.119014] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.119138] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.119334] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.119482] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 884.119705] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 
tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.119788] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.119948] env[62109]: DEBUG nova.virt.hardware [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 884.120793] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88c97e0-d311-4ea8-a502-4bf01e03b555 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.129902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd37893-a25e-4e1f-9ffa-af47401e265d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.142727] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.148125] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Creating folder: Project (dd6c831cfae3480c9ebea53f615c811e). Parent ref: group-v108864. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 884.148377] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92ff32e3-b72a-47ef-813d-56f605e49b8f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.152245] env[62109]: DEBUG nova.network.neutron [-] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.152639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg afa466311c624128aebb20bb845a05e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.161076] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afa466311c624128aebb20bb845a05e8 [ 884.162087] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Created folder: Project (dd6c831cfae3480c9ebea53f615c811e) in parent group-v108864. [ 884.162265] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Creating folder: Instances. Parent ref: group-v108880. 
{{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 884.162695] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d03c360e-c6d2-4ff9-8f03-5da103fe1ca8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.171426] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Created folder: Instances in parent group-v108880. [ 884.171637] env[62109]: DEBUG oslo.service.loopingcall [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.171806] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 884.171986] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2aa2430-e72b-4446-a4f7-2d1f9c337f3f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.187653] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.187653] env[62109]: value = "task-401499" [ 884.187653] env[62109]: _type = "Task" [ 884.187653] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.196220] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401499, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.300936] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.301166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.347461] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 938e72d1e2d14f3bb640387c817173c8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.354832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 938e72d1e2d14f3bb640387c817173c8 [ 884.378351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg ba11c8eea90c4ff296d3a95ba6647936 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.386753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba11c8eea90c4ff296d3a95ba6647936 [ 884.391809] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 884.474246] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.474776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg bbd518564f5e46b3907a27d92c103621 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.482865] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbd518564f5e46b3907a27d92c103621 [ 884.655085] env[62109]: INFO nova.compute.manager [-] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Took 1.02 seconds to deallocate network for instance. 
[ 884.657472] env[62109]: DEBUG nova.compute.claims [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 884.657556] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.698607] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401499, 'name': CreateVM_Task, 'duration_secs': 0.248818} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.698772] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 884.699208] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.699368] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.699690] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.699934] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef67f58-e91a-4bc0-b340-bc0635ded531 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.704234] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 884.704234] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5280338d-3dde-8870-dbd7-40a72493ea42" [ 884.704234] env[62109]: _type = "Task" [ 884.704234] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.711336] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5280338d-3dde-8870-dbd7-40a72493ea42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.862613] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 884.862968] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 8a571cf2e5474e75b91167021d233a4e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.869548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a571cf2e5474e75b91167021d233a4e [ 884.977088] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Releasing lock "refresh_cache-3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.977321] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 884.977579] env[62109]: DEBUG nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.977757] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 884.992422] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 884.993082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 658d8d4aa71d46e3b890aae3bbe654e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 884.999839] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 658d8d4aa71d46e3b890aae3bbe654e2 [ 885.045235] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60c1b4f-77c5-43f9-9e5d-2e3f3a6a2846 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.053725] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0a7843-810d-4aaf-8089-5d182aba068f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.089088] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193fcdf-8307-4e76-a8e3-4850511624a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.096079] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f9b0f4-feed-43d2-b371-0aa752d09fe5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.109604] env[62109]: DEBUG nova.compute.provider_tree [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.110081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg bceb62c964254fac86fb16d9d0182abb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 885.117170] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bceb62c964254fac86fb16d9d0182abb [ 885.214083] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5280338d-3dde-8870-dbd7-40a72493ea42, 'name': SearchDatastore_Task, 'duration_secs': 0.009672} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.214480] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.214769] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.215067] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.215283] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.215512] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.215821] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d8ede17-ad68-4041-8c0f-5fc9faa7800e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.223719] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.223980] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 885.224854] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9412556-67c5-4ac6-8e5a-dad931c6a1ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.230353] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 885.230353] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52e64a13-74f5-0f83-3447-51d2bd58fd70" [ 885.230353] env[62109]: _type = "Task" [ 885.230353] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.237840] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52e64a13-74f5-0f83-3447-51d2bd58fd70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.409905] env[62109]: DEBUG nova.network.neutron [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.410518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ace1782a84314ebbbc877b1c7f0a3f69 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 885.418511] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ace1782a84314ebbbc877b1c7f0a3f69 [ 885.495443] env[62109]: DEBUG nova.network.neutron [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.496018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 83575c97cb8143f2832133779363b43f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 885.503485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83575c97cb8143f2832133779363b43f [ 885.613761] env[62109]: DEBUG nova.scheduler.client.report [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} 
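Note: the inventory data logged just above records, per resource class, a total, a reserved amount and an allocation ratio for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. In the Placement model the schedulable capacity of a class is (total - reserved) * allocation_ratio, which is why the report is unchanged as long as those three numbers are unchanged. The sketch below only reproduces that arithmetic for the values in the log entry; the function name is illustrative, not a Nova or Placement API.

    # Minimal sketch, assuming only the values reported in the log entry above.
    # Effective (schedulable) capacity per resource class:
    #   (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Amount of each resource class the scheduler may place on this provider.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}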
[ 885.616556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 574f95b2cbbf4c6b874380063b09f83d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 885.630074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 574f95b2cbbf4c6b874380063b09f83d [ 885.740434] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52e64a13-74f5-0f83-3447-51d2bd58fd70, 'name': SearchDatastore_Task, 'duration_secs': 0.007881} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.741222] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f26746d-ccd9-446c-afc5-adc09d241c61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.746380] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 885.746380] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5272aac5-4383-361d-4c5f-0cdc1afff23d" [ 885.746380] env[62109]: _type = "Task" [ 885.746380] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.753611] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5272aac5-4383-361d-4c5f-0cdc1afff23d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.913595] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Releasing lock "refresh_cache-b95c60dc-50c4-4afc-acb0-3308e490b808" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.913886] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: b95c60dc-50c4-4afc-acb0-3308e490b808] Updated the network info_cache for instance {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 885.914061] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914247] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914398] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914547] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914687] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914830] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.914970] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 885.915114] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.915463] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 92f94874a8824396a59df8539b376d4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 885.924136] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92f94874a8824396a59df8539b376d4a [ 885.998047] env[62109]: INFO nova.compute.manager [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] [instance: 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e] Took 1.02 seconds to deallocate network for instance. [ 885.999937] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 880dbc555659453cbee9cd75f5a89ccf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.033401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 880dbc555659453cbee9cd75f5a89ccf [ 886.118925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.119636] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 886.121556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 9c5811399c4244fa9e201fc1e94872a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.123452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.667s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.123779] env[62109]: DEBUG nova.objects.instance [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lazy-loading 'resources' on Instance uuid b95c60dc-50c4-4afc-acb0-3308e490b808 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.124147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg eaa6ac8819b147e290b4a08ea71967a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.134002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa6ac8819b147e290b4a08ea71967a2 [ 886.151263] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c5811399c4244fa9e201fc1e94872a2 [ 886.257091] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5272aac5-4383-361d-4c5f-0cdc1afff23d, 'name': SearchDatastore_Task, 'duration_secs': 0.009346} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.257397] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.257663] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore2] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 886.257914] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad539de8-a156-492f-bf2c-d5324acfb22e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.264668] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 886.264668] env[62109]: value = "task-401500" [ 886.264668] env[62109]: _type = "Task" [ 886.264668] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.271755] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401500, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.418849] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.504745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg ba7c5837628446758afd87a445504ed9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.538488] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba7c5837628446758afd87a445504ed9 [ 886.625047] env[62109]: DEBUG nova.compute.utils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.625852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 2a5bf6b904124fa2a00a8cdd6c1a56f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.626960] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 886.627953] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.638486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a5bf6b904124fa2a00a8cdd6c1a56f5 [ 886.675602] env[62109]: DEBUG nova.policy [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0933891c54584b059f68770a8c930f1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d361e27776e47eeadaa4a29b4f9338f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 886.774196] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.379804} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.774444] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore2] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 886.774687] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.774881] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebdad8dc-6451-450d-b63b-dd805b498978 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.782355] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 886.782355] env[62109]: value = "task-401501" [ 886.782355] env[62109]: _type = "Task" [ 886.782355] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.792995] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401501, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.817360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efff21a4-ef25-48ec-9f18-674b8065e6f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.824636] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dce67d-4db2-4c04-b464-b8a5779c3a50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.856120] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b798dd-a3c4-4b19-93c5-b2ae6b164e69 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.863596] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdb4600-0a8b-4e0f-9996-748ec6ae540f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.876474] env[62109]: DEBUG nova.compute.provider_tree [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.877016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 8a7bb0d4b2ed495c86ea86490e367d32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 886.886161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a7bb0d4b2ed495c86ea86490e367d32 [ 887.013162] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Successfully created port: 2b78f53d-d85a-41f3-91bb-f8df87915fcc {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.026683] env[62109]: INFO nova.scheduler.client.report [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Deleted allocations for instance 3c0ed5ff-4232-4ab5-a91c-d82da0008f8e [ 887.033892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Expecting reply to msg 55bfa276941849d9a62daf45d2e61894 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.046674] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55bfa276941849d9a62daf45d2e61894 [ 887.133246] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 887.137294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 91bfefae5a10436dacbe00d5d06ce775 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.171717] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91bfefae5a10436dacbe00d5d06ce775 [ 887.292763] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401501, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063937} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.293317] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.294288] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2917f5-0e25-46f9-98d1-91070e0774b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.324697] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.325164] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d59ce724-56de-4f67-9517-ab7cd27d3863 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.346234] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 887.346234] env[62109]: value = "task-401502" [ 887.346234] env[62109]: _type = "Task" [ 887.346234] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.355044] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401502, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.379545] env[62109]: DEBUG nova.scheduler.client.report [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.382991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg 0948eddc49a247dab9cfddbb98dce7d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.394108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0948eddc49a247dab9cfddbb98dce7d2 [ 887.536095] env[62109]: DEBUG oslo_concurrency.lockutils [None req-77b948e5-349d-436a-9f94-c4807bcef6cc tempest-ImagesTestJSON-1803525400 tempest-ImagesTestJSON-1803525400-project-member] Lock "3c0ed5ff-4232-4ab5-a91c-d82da0008f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.080s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.536969] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg ff7af65776f449afb07b1db79fea2aa2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.547824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff7af65776f449afb07b1db79fea2aa2 [ 887.643242] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg f48593ba9ad9465d825652fce17b5bc8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.681472] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f48593ba9ad9465d825652fce17b5bc8 [ 887.693217] env[62109]: DEBUG nova.compute.manager [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Received event network-changed-2b78f53d-d85a-41f3-91bb-f8df87915fcc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 887.694263] env[62109]: DEBUG nova.compute.manager [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Refreshing instance network info cache due to event network-changed-2b78f53d-d85a-41f3-91bb-f8df87915fcc. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 887.695005] env[62109]: DEBUG oslo_concurrency.lockutils [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] Acquiring lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.695273] env[62109]: DEBUG oslo_concurrency.lockutils [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] Acquired lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.695742] env[62109]: DEBUG nova.network.neutron [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Refreshing network info cache for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 887.696323] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] Expecting reply to msg 159819b0ff7c469a9533943471a84886 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.704812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 159819b0ff7c469a9533943471a84886 [ 887.856527] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401502, 'name': ReconfigVM_Task, 'duration_secs': 0.263026} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.857008] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.857749] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa49d487-6057-4e7e-bc4c-52fe25a32e99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.864044] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 887.864044] env[62109]: value = "task-401503" [ 887.864044] env[62109]: _type = "Task" [ 887.864044] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.873301] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401503, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.886109] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.763s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.889197] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.161s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.891056] env[62109]: INFO nova.compute.claims [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.892892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 289bbb5fb96a4f25bd74025479d773ae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.912564] env[62109]: INFO nova.scheduler.client.report [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Deleted allocations for instance b95c60dc-50c4-4afc-acb0-3308e490b808 [ 887.923519] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg e3b6fc539f914b5b879c1522b1bd1f25 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 887.937805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 289bbb5fb96a4f25bd74025479d773ae [ 887.969137] env[62109]: ERROR nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. 
[ 887.969137] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.969137] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 887.969137] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 887.969137] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.969137] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.969137] env[62109]: ERROR nova.compute.manager raise self.value [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 887.969137] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 887.969137] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.969137] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 887.969636] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.969636] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 887.969636] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. 
[ 887.969636] env[62109]: ERROR nova.compute.manager [ 887.969636] env[62109]: Traceback (most recent call last): [ 887.969636] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 887.969636] env[62109]: listener.cb(fileno) [ 887.969636] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 887.969636] env[62109]: result = function(*args, **kwargs) [ 887.969636] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 887.969636] env[62109]: return func(*args, **kwargs) [ 887.969636] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 887.969636] env[62109]: raise e [ 887.969636] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.969636] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 887.969636] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 887.969636] env[62109]: created_port_ids = self._update_ports_for_instance( [ 887.969636] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 887.969636] env[62109]: with excutils.save_and_reraise_exception(): [ 887.969636] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.969636] env[62109]: self.force_reraise() [ 887.969636] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.969636] env[62109]: raise self.value [ 887.969636] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 887.969636] env[62109]: updated_port = self._update_port( [ 887.969636] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.969636] env[62109]: _ensure_no_port_binding_failure(port) [ 887.969636] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.969636] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 887.970449] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. [ 887.970449] env[62109]: Removing descriptor: 16 [ 887.980621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3b6fc539f914b5b879c1522b1bd1f25 [ 888.040230] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 888.042203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg b09e2d1cbed14c7597e6c7bd262167e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.074989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b09e2d1cbed14c7597e6c7bd262167e0 [ 888.146189] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 888.170912] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.171154] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.171348] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.171485] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.171627] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.171770] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 888.171976] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.172150] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.172313] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.172471] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.172643] env[62109]: DEBUG nova.virt.hardware [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.173540] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587396a8-7b2f-4ed6-abd5-877d6d20b73a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.181644] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd43937-f3a8-4c30-9595-c92fcf553d44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.197895] env[62109]: ERROR nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. 
[ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Traceback (most recent call last): [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] yield resources [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.driver.spawn(context, instance, image_meta, [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] vm_ref = self.build_virtual_machine(instance, [ 888.197895] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] vif_infos = vmwarevif.get_vif_info(self._session, [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] for vif in network_info: [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return self._sync_wrapper(fn, *args, **kwargs) [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.wait() [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self[:] = self._gt.wait() [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return self._exit_event.wait() [ 888.198417] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 888.198417] env[62109]: ERROR 
nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] current.throw(*self._exc) [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] result = function(*args, **kwargs) [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return func(*args, **kwargs) [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise e [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] nwinfo = self.network_api.allocate_for_instance( [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] created_port_ids = self._update_ports_for_instance( [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] with excutils.save_and_reraise_exception(): [ 888.198888] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.force_reraise() [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise self.value [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] updated_port = self._update_port( [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] _ensure_no_port_binding_failure(port) [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise exception.PortBindingFailed(port_id=port['id']) [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. [ 888.199460] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] [ 888.199460] env[62109]: INFO nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Terminating instance [ 888.200404] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.222161] env[62109]: DEBUG nova.network.neutron [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 888.316397] env[62109]: DEBUG nova.network.neutron [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.316922] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] Expecting reply to msg b57cc3b4add94816a7e532505094d99b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.326623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b57cc3b4add94816a7e532505094d99b [ 888.374003] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401503, 'name': Rename_Task, 'duration_secs': 0.13686} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.374302] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 888.374558] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b080977c-81af-41f0-ade5-7b67297fb80e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.380960] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 888.380960] env[62109]: value = "task-401504" [ 888.380960] env[62109]: _type = "Task" [ 888.380960] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.388685] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.396935] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg af12ef392d6a430faeff9be2f27b8fe4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.408498] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af12ef392d6a430faeff9be2f27b8fe4 [ 888.434319] env[62109]: DEBUG oslo_concurrency.lockutils [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Lock "b95c60dc-50c4-4afc-acb0-3308e490b808" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.692s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.434663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-74cd6ac1-0bca-4849-880e-76034dd5e9ad tempest-ServerShowV247Test-1781566314 tempest-ServerShowV247Test-1781566314-project-member] Expecting reply to msg e94b878d739446f0a8f1e37eb150048b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.449440] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e94b878d739446f0a8f1e37eb150048b [ 888.562711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.819579] env[62109]: DEBUG oslo_concurrency.lockutils [req-ac205ec7-2b72-4ca1-adc9-2f0a462faaa4 req-b645a801-5b0b-4570-a0de-442f669f2612 service nova] Releasing lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.820022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquired lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.820216] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.820685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg d2b286a21250481ab64f95557eef81f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.828833] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2b286a21250481ab64f95557eef81f7 [ 888.897786] env[62109]: DEBUG oslo_vmware.api [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401504, 'name': PowerOnVM_Task, 'duration_secs': 0.427361} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.898143] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 888.898356] env[62109]: INFO nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Took 4.80 seconds to spawn the instance on the hypervisor. 
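Editor's note: the sequence just above (Invoking VirtualMachine.PowerOnVM_Task, the "Waiting for the task" / _poll_task entries for task-401504, then "Powered on the VM") is the usual oslo.vmware invoke-then-poll pattern: the SOAP call returns a task reference immediately and the session polls it until vCenter reports completion. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a VirtualMachine managed-object reference vm_ref (both placeholder names, not taken from the log):

def power_on_vm(session, vm_ref):
    """Start a VM and block until the corresponding vCenter task finishes."""
    # invoke_api sends the SOAP request through the session's vim client and
    # returns a task managed-object reference (e.g. "task-401504"), not a result.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task drives the polling loop recorded by the _poll_task entries
    # ("progress is 0%" ... "completed successfully") and raises if the task
    # ends in an error state.
    return session.wait_for_task(task_ref)

This is the behaviour the vm_util.power_on_instance DEBUG lines above correspond to; the sketch is not Nova's actual helper, only the underlying call pattern.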
[ 888.898534] env[62109]: DEBUG nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 888.901886] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c840dc-ef46-4c8a-9c04-badf114ebdaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.909888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 38e64b27d17049d1b025ab3deb727c32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 888.967919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38e64b27d17049d1b025ab3deb727c32 [ 889.190737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0946f6b1-fc13-4693-b714-adb67241cbf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.200166] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff204048-ee91-457c-aa28-5f327fe577f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.235443] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8412584d-de83-490e-93a4-31f359633dbd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.243682] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70d4ec9-1825-4d13-818d-7134a438ba23 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.258023] env[62109]: DEBUG nova.compute.provider_tree [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.258745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg ae52b30ad0d24ed8b79cbf70abd87db8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.303074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae52b30ad0d24ed8b79cbf70abd87db8 [ 889.340160] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.394305] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg d3e2fa5f8c9349bba42e2be41c1f2139 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.405999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3e2fa5f8c9349bba42e2be41c1f2139 [ 889.419631] env[62109]: INFO nova.compute.manager [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Took 25.61 seconds to build instance. [ 889.419796] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 4a7ceebab79c44488b3021cf1cfaa256 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.432155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a7ceebab79c44488b3021cf1cfaa256 [ 889.467390] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.467916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg dabbfc4660324bafb3cdcc930304c854 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.489876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dabbfc4660324bafb3cdcc930304c854 [ 889.720098] env[62109]: DEBUG nova.compute.manager [req-211de4d2-28d2-4035-9f77-d8cb0ef80f74 req-7c20430f-ac97-4b0d-8d4a-ab4a4334950b service nova] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Received event network-vif-deleted-2b78f53d-d85a-41f3-91bb-f8df87915fcc {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 889.761693] env[62109]: DEBUG nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.764138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 60b4a9e27b1b4ad3acd27c8bc0feae63 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.784893] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60b4a9e27b1b4ad3acd27c8bc0feae63 [ 889.896712] env[62109]: INFO nova.compute.manager [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Rebuilding instance [ 889.925583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d6cec8f9-58e6-4c1c-ac20-468cef5a3ec1 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.721s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.926364] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4e3b56f2685e4cef81c8bf966892e40f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.936808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e3b56f2685e4cef81c8bf966892e40f [ 889.943484] env[62109]: DEBUG nova.compute.manager [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.944331] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b30b89-8eaa-4997-bdc3-b7ee2ddae6a6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.952918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 0a99a3572ac4484c880a01de1d055cd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 889.969699] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Releasing lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.970086] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 889.970313] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 889.970576] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23eff8db-ae43-47bd-a94a-3d8d1f5620cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.980621] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de59e18-efd2-49c5-bd27-3dc118eebbf5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.992792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a99a3572ac4484c880a01de1d055cd3 [ 890.005127] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f99ec88-f05f-4583-b08b-d40fb37e275e could not be found. [ 890.005343] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 890.005514] env[62109]: INFO nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 890.005743] env[62109]: DEBUG oslo.service.loopingcall [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.005958] env[62109]: DEBUG nova.compute.manager [-] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.006037] env[62109]: DEBUG nova.network.neutron [-] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.019862] env[62109]: DEBUG nova.network.neutron [-] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.020357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 74b363f162f64ea28942e54394529a7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.027432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74b363f162f64ea28942e54394529a7d [ 890.267135] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.267713] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 890.270646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 2956032d73cd4af8af90c00450ec1c76 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.271366] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.718s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.272778] env[62109]: INFO nova.compute.claims [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.274286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 341821e44546451598a8da6e8bd511c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.306768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2956032d73cd4af8af90c00450ec1c76 [ 890.311762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 341821e44546451598a8da6e8bd511c2 [ 890.428125] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 890.430037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1ec6a4bcc445470898aca47a123b17be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.455324] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 890.455620] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bbb88a1-104a-412a-acc7-31e680793363 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.462697] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 890.462697] env[62109]: value = "task-401505" [ 890.462697] env[62109]: _type = "Task" [ 890.462697] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.470845] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401505, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.472158] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ec6a4bcc445470898aca47a123b17be [ 890.522054] env[62109]: DEBUG nova.network.neutron [-] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.522541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1068125555244b09a75963a4cdf65b6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.531282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1068125555244b09a75963a4cdf65b6d [ 890.777383] env[62109]: DEBUG nova.compute.utils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.778008] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg e2bc5b106b3f4cd2a8758d4e5152413a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.780073] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg e6e6c4c2cd1846778859c86190a41ff1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 890.781053] env[62109]: DEBUG nova.compute.manager [None 
req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 890.781214] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 890.787493] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6e6c4c2cd1846778859c86190a41ff1 [ 890.796026] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2bc5b106b3f4cd2a8758d4e5152413a [ 890.851741] env[62109]: DEBUG nova.policy [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feae790f4343445c86cfb1b39cb9636e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e10657828d9480b948e59b98490572b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 890.951862] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.973262] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401505, 'name': PowerOffVM_Task, 'duration_secs': 0.118518} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.973539] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 890.973755] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.974574] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2a52cf-8314-45d9-ae54-37abc628e3ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.981069] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 890.981297] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d06ff7b-5b08-48c9-a00e-58943e371265 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.003845] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.004175] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleting contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.004387] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Deleting the datastore file [datastore2] 309a7bae-82f5-4b9e-ac86-e0f1803f2585 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.004734] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed146c33-aefb-4593-bed8-eb0072446846 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.012183] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 891.012183] env[62109]: value = "task-401507" [ 891.012183] env[62109]: _type = "Task" [ 891.012183] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.019523] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401507, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.024994] env[62109]: INFO nova.compute.manager [-] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Took 1.02 seconds to deallocate network for instance. [ 891.027123] env[62109]: DEBUG nova.compute.claims [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 891.027301] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.281860] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.283639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 81638611b5d24e09b857d983eb43d783 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 891.316135] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Successfully created port: 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.334474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81638611b5d24e09b857d983eb43d783 [ 891.501564] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69315230-a163-4366-92a9-144965405465 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.509470] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb17034-9e7e-457a-a423-92156ab262b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.541311] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd83f512-20cf-4155-9c32-5dc034877df3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.547369] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 
tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219291} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.547993] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.548214] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleted contents of the VM from datastore datastore2 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 891.548575] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 891.550428] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 44aec7e797af44d8b63b1d7da11dc6ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 891.556094] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc1618b-d343-4ab4-b3c4-955f27dfaa70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.577928] env[62109]: DEBUG nova.compute.provider_tree [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.578528] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d7bfb42b9e0c49e2b8d7b03ad61284ef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 891.586602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44aec7e797af44d8b63b1d7da11dc6ec [ 891.587120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7bfb42b9e0c49e2b8d7b03ad61284ef [ 891.791404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg b7fe696c29184e0db4b078d2b11af4a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 891.822141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7fe696c29184e0db4b078d2b11af4a4 [ 892.056114] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 
74060039fa514a1ebe4dea0277564220 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.086203] env[62109]: DEBUG nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 892.086203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg c3931f5df9334d72a2caa17e2e3a9778 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.087650] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74060039fa514a1ebe4dea0277564220 [ 892.096435] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3931f5df9334d72a2caa17e2e3a9778 [ 892.254366] env[62109]: DEBUG nova.compute.manager [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Received event network-changed-2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 892.254523] env[62109]: DEBUG nova.compute.manager [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Refreshing instance network info cache due to event network-changed-2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 892.254833] env[62109]: DEBUG oslo_concurrency.lockutils [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] Acquiring lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.254952] env[62109]: DEBUG oslo_concurrency.lockutils [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] Acquired lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.255110] env[62109]: DEBUG nova.network.neutron [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Refreshing network info cache for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 892.255519] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] Expecting reply to msg d2c8651daff0459d9e1fb8dde08db9b3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.263590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2c8651daff0459d9e1fb8dde08db9b3 [ 892.294196] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 892.319519] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.319749] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.319901] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.320098] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.320249] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.320396] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.320596] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.320751] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.320914] env[62109]: DEBUG nova.virt.hardware [None 
req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.321073] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.321244] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.322090] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e41b3a-05ff-4b4f-baaf-242f8c637f4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.332026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793283eb-c7d3-40c2-b563-1768c72ab0dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.500051] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. 
[ 892.500051] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.500051] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.500051] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.500051] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.500051] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.500051] env[62109]: ERROR nova.compute.manager raise self.value [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.500051] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 892.500051] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.500051] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 892.500582] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.500582] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 892.500582] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. 
[ 892.500582] env[62109]: ERROR nova.compute.manager [ 892.500582] env[62109]: Traceback (most recent call last): [ 892.500582] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 892.500582] env[62109]: listener.cb(fileno) [ 892.500582] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.500582] env[62109]: result = function(*args, **kwargs) [ 892.500582] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.500582] env[62109]: return func(*args, **kwargs) [ 892.500582] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.500582] env[62109]: raise e [ 892.500582] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.500582] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 892.500582] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.500582] env[62109]: created_port_ids = self._update_ports_for_instance( [ 892.500582] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.500582] env[62109]: with excutils.save_and_reraise_exception(): [ 892.500582] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.500582] env[62109]: self.force_reraise() [ 892.500582] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.500582] env[62109]: raise self.value [ 892.500582] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.500582] env[62109]: updated_port = self._update_port( [ 892.500582] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.500582] env[62109]: _ensure_no_port_binding_failure(port) [ 892.500582] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.500582] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 892.501348] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. [ 892.501348] env[62109]: Removing descriptor: 16 [ 892.501348] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. 
[ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Traceback (most recent call last): [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] yield resources [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.driver.spawn(context, instance, image_meta, [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 892.501348] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] vm_ref = self.build_virtual_machine(instance, [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] vif_infos = vmwarevif.get_vif_info(self._session, [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] for vif in network_info: [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self._sync_wrapper(fn, *args, **kwargs) [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.wait() [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self[:] = self._gt.wait() [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self._exit_event.wait() [ 892.501652] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 892.502027] env[62109]: ERROR 
nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] result = hub.switch() [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self.greenlet.switch() [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] result = function(*args, **kwargs) [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return func(*args, **kwargs) [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise e [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] nwinfo = self.network_api.allocate_for_instance( [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.502027] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] created_port_ids = self._update_ports_for_instance( [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] with excutils.save_and_reraise_exception(): [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.force_reraise() [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise self.value [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] updated_port = self._update_port( [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.502400] 
env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] _ensure_no_port_binding_failure(port) [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.502400] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise exception.PortBindingFailed(port_id=port['id']) [ 892.502740] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. [ 892.502740] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] [ 892.502740] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Terminating instance [ 892.503363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.579933] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.579933] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.579933] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.580181] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.580243] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e 
tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.580382] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.580589] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.580748] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.580911] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.581069] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.581237] env[62109]: DEBUG nova.virt.hardware [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.582096] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06a7fa5-0a19-4bc1-bf9d-4ae221100371 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.589618] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.590079] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 892.591675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9b8f7baf61944097ad84acf05994cdc4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.592717] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.271s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.594022] env[62109]: INFO nova.compute.claims [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.595439] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 5367a83b4a934b63ab645c3a7156b19f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.597381] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d65ecd-41fa-44b5-b2ca-36dfce5e8df9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.612886] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.617885] env[62109]: DEBUG oslo.service.loopingcall [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.618196] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 892.618408] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dd95845-72eb-4b4f-b7da-433901936867 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.633584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b8f7baf61944097ad84acf05994cdc4 [ 892.634166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5367a83b4a934b63ab645c3a7156b19f [ 892.637287] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.637287] env[62109]: value = "task-401508" [ 892.637287] env[62109]: _type = "Task" [ 892.637287] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.646977] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401508, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.787193] env[62109]: DEBUG nova.network.neutron [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.867639] env[62109]: DEBUG nova.network.neutron [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.868296] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] Expecting reply to msg 03ea03ec311b44d6801ec60cc25ae084 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 892.877913] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03ea03ec311b44d6801ec60cc25ae084 [ 893.105144] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg d25cc831464246e3bdbc56f8573535c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 893.106839] env[62109]: DEBUG nova.compute.utils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.107402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg a61ccc6e95db4300b6ab111c6a3d449b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 893.108726] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 893.108896] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.113332] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d25cc831464246e3bdbc56f8573535c0 [ 893.118684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a61ccc6e95db4300b6ab111c6a3d449b [ 893.147598] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401508, 'name': CreateVM_Task, 'duration_secs': 0.250873} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.149098] env[62109]: DEBUG nova.policy [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feae790f4343445c86cfb1b39cb9636e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e10657828d9480b948e59b98490572b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 893.150464] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 893.150876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.151034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.151343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.151889] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef4ccb3-a02c-4b38-b13e-a7b5dacc2d4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.156754] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 893.156754] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]523d3598-5d5c-20e2-3a0d-eadc5da027b5" [ 893.156754] env[62109]: _type = "Task" [ 893.156754] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.164212] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]523d3598-5d5c-20e2-3a0d-eadc5da027b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.348530] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquiring lock "8bd1a8aa-844b-47ca-9296-0c30af695984" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.348804] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "8bd1a8aa-844b-47ca-9296-0c30af695984" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.370587] env[62109]: DEBUG oslo_concurrency.lockutils [req-e3691792-c9ff-46c4-954e-af7581ba148e req-6a1173e9-e09d-462b-957a-3ab6c0ca833d service nova] Releasing lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.371086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.371283] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.371722] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9c4550cd426146638c7a2e2727cc2e90 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 893.379763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c4550cd426146638c7a2e2727cc2e90 [ 893.455957] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Successfully created port: 84f69f0f-65c8-4a94-811d-05ed8f7a266a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.612081] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 893.613801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9603ac919c104d68a0f1ab35b71e4fa3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 893.650361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9603ac919c104d68a0f1ab35b71e4fa3 [ 893.666372] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]523d3598-5d5c-20e2-3a0d-eadc5da027b5, 'name': SearchDatastore_Task, 'duration_secs': 0.008334} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.666668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.666903] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.667126] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.667270] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.667439] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.667699] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d266837-09da-4b7b-bccb-c6f65c6cf710 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.675714] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.675905] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 893.676645] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75b8a4b9-3195-49c8-bb8b-f5d08677e883 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.684446] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 893.684446] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52089e22-c2e1-7b15-50c7-e3bc666692f4" [ 893.684446] env[62109]: _type = "Task" [ 893.684446] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.691827] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52089e22-c2e1-7b15-50c7-e3bc666692f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.848837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480c6c4d-dc9c-4c60-ab3e-9dea9105b5a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.858273] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300d2d2a-26db-4575-b23b-a4d172f5614d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.890400] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf4c710-a0c7-467a-8c2f-85964bc0269c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.899391] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba19d817-264f-4ba0-9fd2-baca8c5a70d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.905796] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.915415] env[62109]: DEBUG nova.compute.provider_tree [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.915990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 0b64da0233dc47dcb3ee769775c2f128 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 893.922887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b64da0233dc47dcb3ee769775c2f128 [ 894.000057] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.000579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 8e5d5c4b3385484784bb4e72294ceded in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.009996] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e5d5c4b3385484784bb4e72294ceded [ 894.129251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg f652e3424282464e96ba31edd87eb4fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.159880] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f652e3424282464e96ba31edd87eb4fe [ 894.196290] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52089e22-c2e1-7b15-50c7-e3bc666692f4, 'name': SearchDatastore_Task, 'duration_secs': 0.007858} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.197216] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdd1c64c-d7d6-41d6-a30c-4a5424ecab68 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.203651] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 894.203651] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]522ca398-eaaa-d9f9-01a3-3a6683918ce2" [ 894.203651] env[62109]: _type = "Task" [ 894.203651] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.212534] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]522ca398-eaaa-d9f9-01a3-3a6683918ce2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.289748] env[62109]: DEBUG nova.compute.manager [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Received event network-vif-deleted-2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 894.290102] env[62109]: DEBUG nova.compute.manager [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Received event network-changed-84f69f0f-65c8-4a94-811d-05ed8f7a266a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 894.290400] env[62109]: DEBUG nova.compute.manager [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Refreshing instance network info cache due to event network-changed-84f69f0f-65c8-4a94-811d-05ed8f7a266a. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 894.290728] env[62109]: DEBUG oslo_concurrency.lockutils [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] Acquiring lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.290986] env[62109]: DEBUG oslo_concurrency.lockutils [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] Acquired lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.291256] env[62109]: DEBUG nova.network.neutron [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Refreshing network info cache for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 894.291875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] Expecting reply to msg c2377025eafe41e0ac42893778b062bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.299776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2377025eafe41e0ac42893778b062bb [ 894.301509] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. 
[ 894.301509] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.301509] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.301509] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.301509] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.301509] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.301509] env[62109]: ERROR nova.compute.manager raise self.value [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.301509] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 894.301509] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.301509] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 894.302016] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.302016] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 894.302016] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. 
[ 894.302016] env[62109]: ERROR nova.compute.manager [ 894.302331] env[62109]: Traceback (most recent call last): [ 894.302425] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 894.302425] env[62109]: listener.cb(fileno) [ 894.302500] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.302500] env[62109]: result = function(*args, **kwargs) [ 894.302569] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 894.302569] env[62109]: return func(*args, **kwargs) [ 894.302644] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.302644] env[62109]: raise e [ 894.302719] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.302719] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 894.302787] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.302787] env[62109]: created_port_ids = self._update_ports_for_instance( [ 894.302854] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.302854] env[62109]: with excutils.save_and_reraise_exception(): [ 894.302922] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.302922] env[62109]: self.force_reraise() [ 894.302996] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.302996] env[62109]: raise self.value [ 894.303063] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.303063] env[62109]: updated_port = self._update_port( [ 894.303131] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.303131] env[62109]: _ensure_no_port_binding_failure(port) [ 894.303198] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.303198] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 894.303274] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. [ 894.303320] env[62109]: Removing descriptor: 16 [ 894.318427] env[62109]: DEBUG nova.network.neutron [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.399771] env[62109]: DEBUG nova.network.neutron [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.400690] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] Expecting reply to msg f8379a8f6d1d4807a59b81d78c16cd28 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.409562] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8379a8f6d1d4807a59b81d78c16cd28 [ 894.418548] env[62109]: DEBUG nova.scheduler.client.report [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.421703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg ffab83a4d7fc4dd1a5d667c875874a34 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.437895] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffab83a4d7fc4dd1a5d667c875874a34 [ 894.510263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.510934] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 894.511281] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 894.511678] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df1f4073-c2a5-48c2-9d07-425199782a5c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.520837] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec7ad87-4879-470e-b19a-6362f1d48190 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.543240] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a38b70f-eabe-4b11-a371-cf971184211f could not be found. [ 894.543623] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 894.543930] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 894.544289] env[62109]: DEBUG oslo.service.loopingcall [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.544835] env[62109]: DEBUG nova.compute.manager [-] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.545021] env[62109]: DEBUG nova.network.neutron [-] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 894.561827] env[62109]: DEBUG nova.network.neutron [-] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.562545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ab0f294ef17b426a9d3f5640b370e120 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.569464] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab0f294ef17b426a9d3f5640b370e120 [ 894.633445] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 894.665758] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.666213] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.666485] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.666776] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.667023] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.667267] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.667574] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 
tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.667836] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.668179] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.668536] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.668890] env[62109]: DEBUG nova.virt.hardware [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.670051] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e321509a-94aa-4d30-b5a9-669a9855e8de {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.678780] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb293a8-f64e-4969-a1c3-b9e27f791a04 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.692685] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. 
[ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Traceback (most recent call last): [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] yield resources [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.driver.spawn(context, instance, image_meta, [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] vm_ref = self.build_virtual_machine(instance, [ 894.692685] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] vif_infos = vmwarevif.get_vif_info(self._session, [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] for vif in network_info: [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return self._sync_wrapper(fn, *args, **kwargs) [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.wait() [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self[:] = self._gt.wait() [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return self._exit_event.wait() [ 894.693097] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 894.693097] env[62109]: ERROR 
nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] current.throw(*self._exc) [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] result = function(*args, **kwargs) [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return func(*args, **kwargs) [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise e [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] nwinfo = self.network_api.allocate_for_instance( [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] created_port_ids = self._update_ports_for_instance( [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] with excutils.save_and_reraise_exception(): [ 894.693472] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.force_reraise() [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise self.value [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] updated_port = self._update_port( [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] _ensure_no_port_binding_failure(port) [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise exception.PortBindingFailed(port_id=port['id']) [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. [ 894.693856] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] [ 894.694243] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Terminating instance [ 894.696248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.712627] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]522ca398-eaaa-d9f9-01a3-3a6683918ce2, 'name': SearchDatastore_Task, 'duration_secs': 0.00912} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.712858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.713097] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 894.713328] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9899a301-4725-47ae-9e91-5c656e23402c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.720182] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 894.720182] env[62109]: value = "task-401509" [ 894.720182] env[62109]: _type = "Task" [ 894.720182] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.726862] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.902941] env[62109]: DEBUG oslo_concurrency.lockutils [req-94f20f62-5dce-41a7-aae8-664d440a7c5c req-1f3b5aa1-ab88-4a26-990a-2db7ccaec64b service nova] Releasing lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.903533] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.903839] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.904503] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 03c22c120ba84674bbe336619246b607 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.912783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03c22c120ba84674bbe336619246b607 [ 894.932411] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.933225] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 894.935607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg cb067e0d516e4a2795f5ef89cf0e7121 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.937101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.724s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.938756] env[62109]: INFO nova.compute.claims [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.940363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg f0114c01a24f457f910584459f328502 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 894.972328] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb067e0d516e4a2795f5ef89cf0e7121 [ 894.980104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0114c01a24f457f910584459f328502 [ 895.065211] env[62109]: DEBUG nova.network.neutron [-] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.065771] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bbf5050d976044b29a2a83c26c329ef6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 895.079055] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbf5050d976044b29a2a83c26c329ef6 [ 895.229774] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.420843} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.230040] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 895.230258] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.230510] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32936ec5-b042-47fb-95dc-f11c3f0ea3c9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.236765] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 895.236765] env[62109]: value = "task-401510" [ 895.236765] env[62109]: _type = "Task" [ 895.236765] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.245667] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401510, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.426021] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.447237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg e151d3da9d504213852168e25698db9e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 895.449311] env[62109]: DEBUG nova.compute.utils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 895.449583] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg c4e631d298174a9aa6863a3972593697 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 895.451647] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 895.451647] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 895.456404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e151d3da9d504213852168e25698db9e [ 895.461422] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4e631d298174a9aa6863a3972593697 [ 895.504826] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.505367] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg c66edf2b2d7644b3a932b5f61108f874 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 895.512863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c66edf2b2d7644b3a932b5f61108f874 [ 895.523964] env[62109]: DEBUG nova.policy [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c52ce34c6a5436b862e2d0d2732c59a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '382a0ffa122c435cac5fd7a5e309a08d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 895.567835] env[62109]: INFO nova.compute.manager [-] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Took 1.02 seconds to deallocate network for instance. [ 895.570062] env[62109]: DEBUG nova.compute.claims [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 895.570228] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.745884] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059755} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.746263] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.747059] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e049a521-0557-4b96-b443-2dc36b8a63f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.767421] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.767668] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0533744-634d-4a51-9b9f-8034f4d3c46e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.787359] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 895.787359] env[62109]: value = "task-401511" [ 895.787359] env[62109]: _type = "Task" [ 895.787359] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.795714] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.813236] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Successfully created port: e979a70d-ad81-4d49-a987-fcf30691d88c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.955102] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.956935] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 47ab1f0319ee4d2abf4cb68e92318316 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.000736] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47ab1f0319ee4d2abf4cb68e92318316 [ 896.010810] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.011332] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 896.011570] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.013725] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a525362-84f9-412a-a1c0-5c723945dcaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.022991] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462425f9-6eb1-4b51-8fa4-265b2268f7dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.051857] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a6d10c8-bd2b-40dd-9897-8f30223abe81 could not be found. [ 896.052101] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.052287] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Took 0.04 seconds to destroy the instance on the hypervisor. [ 896.052533] env[62109]: DEBUG oslo.service.loopingcall [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.052762] env[62109]: DEBUG nova.compute.manager [-] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.052855] env[62109]: DEBUG nova.network.neutron [-] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.073887] env[62109]: DEBUG nova.network.neutron [-] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 896.074390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6367bb70e0014d9d88c97189ecbaeee1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.083121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6367bb70e0014d9d88c97189ecbaeee1 [ 896.164061] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6435e8-061d-4e52-bf67-c99bbfe7f615 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.171605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927b787f-ca3a-453d-a927-bf15203b0697 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.205846] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169769a5-8567-4ae2-973d-350eb34a34d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.215018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8570195d-90c6-456c-9345-88c212ac4548 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.229945] env[62109]: DEBUG nova.compute.provider_tree [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.230535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg e977afe35c15448ebf41e48ee7828344 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.244113] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e977afe35c15448ebf41e48ee7828344 [ 896.298108] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401511, 'name': ReconfigVM_Task, 'duration_secs': 0.260431} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.298403] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.299002] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-640300f5-f37a-46ac-afff-ba6f3b521e82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.306519] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 896.306519] env[62109]: value = "task-401512" [ 896.306519] env[62109]: _type = "Task" [ 896.306519] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.312808] env[62109]: DEBUG nova.compute.manager [req-30f9be4d-5850-4145-9186-58489f861efd req-f93dbf2c-b420-4511-b2e7-3d12a3d656cd service nova] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Received event network-vif-deleted-84f69f0f-65c8-4a94-811d-05ed8f7a266a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 896.319033] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401512, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.463880] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg d5c51de87967450297bc94b005d9a3da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.482592] env[62109]: DEBUG nova.compute.manager [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Received event network-changed-e979a70d-ad81-4d49-a987-fcf30691d88c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 896.482772] env[62109]: DEBUG nova.compute.manager [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Refreshing instance network info cache due to event network-changed-e979a70d-ad81-4d49-a987-fcf30691d88c. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 896.482891] env[62109]: DEBUG oslo_concurrency.lockutils [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] Acquiring lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.483035] env[62109]: DEBUG oslo_concurrency.lockutils [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] Acquired lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.483195] env[62109]: DEBUG nova.network.neutron [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Refreshing network info cache for port e979a70d-ad81-4d49-a987-fcf30691d88c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 896.484302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] Expecting reply to msg d80dc17127b341e39167f24a04babe2b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.493337] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d80dc17127b341e39167f24a04babe2b [ 896.500542] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5c51de87967450297bc94b005d9a3da [ 896.576267] env[62109]: DEBUG nova.network.neutron [-] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.576779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 57795dc17e16424786c8cbd38f76daad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.584514] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57795dc17e16424786c8cbd38f76daad [ 896.680330] env[62109]: ERROR nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. 
[ 896.680330] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.680330] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 896.680330] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 896.680330] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.680330] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.680330] env[62109]: ERROR nova.compute.manager raise self.value [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 896.680330] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 896.680330] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.680330] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 896.680814] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.680814] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 896.680814] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. 
[ 896.680814] env[62109]: ERROR nova.compute.manager [ 896.680814] env[62109]: Traceback (most recent call last): [ 896.680814] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 896.680814] env[62109]: listener.cb(fileno) [ 896.680814] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 896.680814] env[62109]: result = function(*args, **kwargs) [ 896.680814] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 896.680814] env[62109]: return func(*args, **kwargs) [ 896.680814] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 896.680814] env[62109]: raise e [ 896.680814] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.680814] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 896.680814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 896.680814] env[62109]: created_port_ids = self._update_ports_for_instance( [ 896.680814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 896.680814] env[62109]: with excutils.save_and_reraise_exception(): [ 896.680814] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.680814] env[62109]: self.force_reraise() [ 896.680814] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.680814] env[62109]: raise self.value [ 896.680814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 896.680814] env[62109]: updated_port = self._update_port( [ 896.680814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.680814] env[62109]: _ensure_no_port_binding_failure(port) [ 896.680814] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.680814] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 896.681553] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. 
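
Note on the two tracebacks above (ports 84f69f0f-65c8-4a94-811d-05ed8f7a266a and e979a70d-ad81-4d49-a987-fcf30691d88c): both end with nova/network/neutron.py's _ensure_no_port_binding_failure raising nova.exception.PortBindingFailed, meaning Neutron returned the port with a failed binding and Nova treats the boot as unrecoverable. The sketch below illustrates the shape of that check only; the 'binding:vif_type' attribute name and the 'binding_failed' sentinel are assumptions based on the standard Neutron port-binding extension, not a copy of the Nova source, and the class here is a stand-in for nova.exception.PortBindingFailed.

# Illustrative sketch, not the actual Nova implementation.
# Assumes the port dict returned by Neutron carries the standard
# 'binding:vif_type' attribute and reports a failed binding as 'binding_failed'.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example using the port from the failure above.
port = {'id': 'e979a70d-ad81-4d49-a987-fcf30691d88c',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # "Binding failed for port e979a70d-..., please check neutron logs ..."

In both cases the compute manager treats the failure as fatal for the build: it terminates the instance, deallocates the (empty) network info, and aborts the resource claim, which is what the surrounding "Terminating instance", "Deallocating network for instance", "Updating instance_info_cache with network_info: []" and "Aborting claim" records show.
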
[ 896.681553] env[62109]: Removing descriptor: 19 [ 896.733354] env[62109]: DEBUG nova.scheduler.client.report [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.735805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 287d161db4ee488c83ae5a7e777a1220 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 896.752830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 287d161db4ee488c83ae5a7e777a1220 [ 896.816673] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401512, 'name': Rename_Task, 'duration_secs': 0.126589} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.816937] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 896.817170] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afaccd5d-180e-4f99-83b9-1f1b173d8621 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.823179] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 896.823179] env[62109]: value = "task-401513" [ 896.823179] env[62109]: _type = "Task" [ 896.823179] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.830126] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.967141] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.994708] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.995761] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.995761] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.995761] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.995761] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.995761] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.996016] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.996108] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.996285] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.996449] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.996622] env[62109]: DEBUG nova.virt.hardware [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.997508] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e87df46-fb58-466a-9df2-44ab998b1d8f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.001046] env[62109]: DEBUG nova.network.neutron [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.008031] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94501c1f-e3f6-4baa-b0f1-f935328aba81 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.023756] env[62109]: ERROR nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. 
[ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Traceback (most recent call last): [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] yield resources [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.driver.spawn(context, instance, image_meta, [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] vm_ref = self.build_virtual_machine(instance, [ 897.023756] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] vif_infos = vmwarevif.get_vif_info(self._session, [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] for vif in network_info: [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return self._sync_wrapper(fn, *args, **kwargs) [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.wait() [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self[:] = self._gt.wait() [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return self._exit_event.wait() [ 897.024191] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 897.024191] env[62109]: ERROR 
nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] current.throw(*self._exc) [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] result = function(*args, **kwargs) [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return func(*args, **kwargs) [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise e [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] nwinfo = self.network_api.allocate_for_instance( [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] created_port_ids = self._update_ports_for_instance( [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] with excutils.save_and_reraise_exception(): [ 897.024588] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.force_reraise() [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise self.value [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] updated_port = self._update_port( [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] _ensure_no_port_binding_failure(port) [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise exception.PortBindingFailed(port_id=port['id']) [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. [ 897.025038] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] [ 897.025038] env[62109]: INFO nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Terminating instance [ 897.026764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquiring lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.079210] env[62109]: INFO nova.compute.manager [-] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Took 1.03 seconds to deallocate network for instance. [ 897.081706] env[62109]: DEBUG nova.compute.claims [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 897.081913] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.082854] env[62109]: DEBUG nova.network.neutron [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.083367] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] Expecting reply to msg a9657e314db14153b3a91e934bcc31ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.091175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9657e314db14153b3a91e934bcc31ba [ 897.239058] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.239610] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Start building networks asynchronously for 
instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 897.241351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 63090e3fcdae4307af4d11e0ca152177 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.242972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.997s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.245263] env[62109]: INFO nova.compute.claims [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.246405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg ae83efc9dc6c4ad6bff4f49b413c2bc9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.276068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63090e3fcdae4307af4d11e0ca152177 [ 897.283110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae83efc9dc6c4ad6bff4f49b413c2bc9 [ 897.333119] env[62109]: DEBUG oslo_vmware.api [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401513, 'name': PowerOnVM_Task, 'duration_secs': 0.400584} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.333491] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 897.333812] env[62109]: DEBUG nova.compute.manager [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.334583] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b605a6f-d6f9-4ecd-b70e-cde082115a8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.343520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 4a3861ed977a45ac801eb5e0a1c598f4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.370764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a3861ed977a45ac801eb5e0a1c598f4 [ 897.585140] env[62109]: DEBUG oslo_concurrency.lockutils [req-da9fc5f9-3601-4b26-9716-22e8fbb52002 req-6242c53a-3a60-4b9f-9e39-fbc3fe7de9b4 service nova] Releasing lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.585585] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquired lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.585770] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.586228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 6be33f5f243b4507851e8a7c6dad2a0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.593251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6be33f5f243b4507851e8a7c6dad2a0e [ 897.744946] env[62109]: DEBUG nova.compute.utils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.745618] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 
tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg ac9bba4d505249ae9669249352cd9f32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.746584] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 897.746713] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 897.749920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 8a74513fe60543e2a0ce1c75c2a323f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 897.757404] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac9bba4d505249ae9669249352cd9f32 [ 897.757654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a74513fe60543e2a0ce1c75c2a323f2 [ 897.795050] env[62109]: DEBUG nova.policy [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85bae9b15b0f43c0bc6e4d4f4c6a28e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4392957210d14f89af11bf7b1bf7ffc7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 897.851486] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.102995] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.190075] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.190646] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg eca059e5b323430a9350bdf646333f62 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.200037] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eca059e5b323430a9350bdf646333f62 [ 898.224894] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Successfully created port: cd11637a-e069-4299-9bbb-23a2b33fa21c {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.249972] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 898.251747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 6cbb2d83c73e43cf8d2a8a06e419f00c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.301172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cbb2d83c73e43cf8d2a8a06e419f00c [ 898.463642] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg b98cbe5f788941a484af7d78362a160b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.469469] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c17e9-4542-491a-80fc-985d88c09311 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.472747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b98cbe5f788941a484af7d78362a160b [ 898.478368] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a684cc6e-773f-434e-a52e-6d371dd288d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.515626] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f479c-07bf-462d-a308-18f1a3cf1d0d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.520176] env[62109]: DEBUG nova.compute.manager [req-18232408-610f-40d6-8b21-3789da164353 
req-a7aa532d-a5b8-4cef-9f4b-3aeccbc4c636 service nova] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Received event network-vif-deleted-e979a70d-ad81-4d49-a987-fcf30691d88c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 898.525433] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e7de6c-84ca-4a0d-ad30-157003c9d11a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.543475] env[62109]: DEBUG nova.compute.provider_tree [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.543980] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 54e9c178940142c38385f0c6e31b513f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.545691] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Successfully created port: 6c603375-1059-41ab-b8b9-ac8bb8da3762 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.551676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54e9c178940142c38385f0c6e31b513f [ 898.693203] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Releasing lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.693657] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 898.693850] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 898.694146] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be42351c-e26f-4de9-a3ad-147c82afc2ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.702542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7151077b-a731-4e9c-98fc-ce0f58c2f27a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.724246] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a276656a-67b0-4ceb-918f-cfb323ed09fd could not be found. [ 898.724492] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 898.724675] env[62109]: INFO nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Took 0.03 seconds to destroy the instance on the hypervisor. [ 898.724954] env[62109]: DEBUG oslo.service.loopingcall [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.725198] env[62109]: DEBUG nova.compute.manager [-] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 898.725227] env[62109]: DEBUG nova.network.neutron [-] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 898.741185] env[62109]: DEBUG nova.network.neutron [-] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.741732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 023982e6de6f4355af9b388b9d4a9a6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.760120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 3c4d76149cc9495ca439dd82fd964748 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 898.764811] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 023982e6de6f4355af9b388b9d4a9a6a [ 898.789501] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c4d76149cc9495ca439dd82fd964748 [ 898.965745] env[62109]: INFO nova.compute.manager [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Rebuilding instance [ 899.006875] env[62109]: DEBUG nova.compute.manager [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.007724] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca5d24a-ac8e-4d17-a373-aeba6b8ba1c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.015432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg aea189d8e1324267a8f03299b513363e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.048320] env[62109]: DEBUG nova.scheduler.client.report [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.050914] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg d506db5f263e46b39198427d7ef0c816 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.057291] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aea189d8e1324267a8f03299b513363e [ 899.063728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d506db5f263e46b39198427d7ef0c816 [ 899.258080] env[62109]: DEBUG nova.network.neutron [-] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.258573] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8ba33f3963a94c6cb3581672b01cedf4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.270921] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 899.271042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ba33f3963a94c6cb3581672b01cedf4 [ 899.293336] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 899.293535] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 899.293692] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.293873] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 899.294012] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.294170] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 899.294353] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 
tempest-ServersTestMultiNic-293111283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 899.294507] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 899.294674] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 899.294824] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 899.294990] env[62109]: DEBUG nova.virt.hardware [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 899.295830] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47443fd5-5adc-4769-9dfd-f8495f3b1f12 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.303531] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adce485-629d-49c9-9a4d-c04462b03db0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.516411] env[62109]: ERROR nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. 
[ 899.516411] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.516411] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 899.516411] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 899.516411] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.516411] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.516411] env[62109]: ERROR nova.compute.manager raise self.value [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 899.516411] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 899.516411] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.516411] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 899.516901] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.516901] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 899.516901] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. 
[ 899.516901] env[62109]: ERROR nova.compute.manager [ 899.516901] env[62109]: Traceback (most recent call last): [ 899.516901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 899.516901] env[62109]: listener.cb(fileno) [ 899.516901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 899.516901] env[62109]: result = function(*args, **kwargs) [ 899.516901] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 899.516901] env[62109]: return func(*args, **kwargs) [ 899.516901] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 899.516901] env[62109]: raise e [ 899.516901] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.516901] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 899.516901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 899.516901] env[62109]: created_port_ids = self._update_ports_for_instance( [ 899.516901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 899.516901] env[62109]: with excutils.save_and_reraise_exception(): [ 899.516901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.516901] env[62109]: self.force_reraise() [ 899.516901] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.516901] env[62109]: raise self.value [ 899.516901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 899.516901] env[62109]: updated_port = self._update_port( [ 899.516901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.516901] env[62109]: _ensure_no_port_binding_failure(port) [ 899.516901] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.516901] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 899.517722] env[62109]: nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. [ 899.517722] env[62109]: Removing descriptor: 19 [ 899.517722] env[62109]: ERROR nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. 
[ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Traceback (most recent call last): [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] yield resources [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.driver.spawn(context, instance, image_meta, [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 899.517722] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] vm_ref = self.build_virtual_machine(instance, [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] vif_infos = vmwarevif.get_vif_info(self._session, [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] for vif in network_info: [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self._sync_wrapper(fn, *args, **kwargs) [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.wait() [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self[:] = self._gt.wait() [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self._exit_event.wait() [ 899.518104] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 899.518520] env[62109]: ERROR 
nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] result = hub.switch() [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self.greenlet.switch() [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] result = function(*args, **kwargs) [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return func(*args, **kwargs) [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise e [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] nwinfo = self.network_api.allocate_for_instance( [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 899.518520] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] created_port_ids = self._update_ports_for_instance( [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] with excutils.save_and_reraise_exception(): [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.force_reraise() [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise self.value [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] updated_port = self._update_port( [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.518910] 
env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] _ensure_no_port_binding_failure(port) [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.518910] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise exception.PortBindingFailed(port_id=port['id']) [ 899.519245] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. [ 899.519245] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] [ 899.519245] env[62109]: INFO nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Terminating instance [ 899.520350] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 899.520863] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.521014] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquired lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.521173] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 899.521568] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 849d15dc15e44252bcddccd78fc59459 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.522262] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f19934e3-aeae-420c-9a20-4610bc3d1f64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.529119] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 899.529119] env[62109]: value = "task-401514" [ 899.529119] env[62109]: _type = "Task" [ 899.529119] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.532737] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 849d15dc15e44252bcddccd78fc59459 [ 899.538428] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.552854] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.553455] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 899.555179] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 54d761a5b93846c9a91c3b105c1b06bc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.556158] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.219s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.558151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg b76abe4184cf4d0ab3d8e3c940f7d45f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 899.587682] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54d761a5b93846c9a91c3b105c1b06bc [ 899.592099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b76abe4184cf4d0ab3d8e3c940f7d45f [ 899.761324] env[62109]: INFO nova.compute.manager [-] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Took 1.04 seconds to deallocate network for instance. 
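[editor's note] The traceback above names nova/network/neutron.py:_ensure_no_port_binding_failure as the point where the PortBindingFailed for port cd11637a-e069-4299-9bbb-23a2b33fa21c is raised. The following is a minimal, standalone sketch of that kind of check, not Nova's actual implementation: the function and exception names follow the traceback, but the inspected key and value ('binding:vif_type' == 'binding_failed') are an assumption, since the log does not show the condition itself.

# Hedged sketch of the port-binding sanity check seen in the traceback above.
# Assumption: Neutron flags a failed binding via the port's 'binding:vif_type'.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Turn a port whose binding Neutron could not complete into a hard error,
    # so the instance build is aborted instead of booting without connectivity.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Usage: a port dict as Neutron might return it after a failed binding.
port = {'id': 'cd11637a-e069-4299-9bbb-23a2b33fa21c',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # -> Binding failed for port cd11637a-..., please check neutron logs ...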
[ 899.764627] env[62109]: DEBUG nova.compute.claims [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 899.764826] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.038794] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401514, 'name': PowerOffVM_Task, 'duration_secs': 0.107482} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.039083] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 900.039310] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 900.040098] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c8bd16-7b00-4036-aaa9-8eca9e6b6c74 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.046940] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 900.047155] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a439b1b-195f-48cf-9dbf-e5fa3fbed352 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.051949] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.060915] env[62109]: DEBUG nova.compute.utils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 900.061628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 15fe030ed7cb4e028a6d32214f7accf6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.066576] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 900.066945] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 900.074935] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 900.075155] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 900.075331] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Deleting the datastore file [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.075585] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03422f74-0a26-4914-96b0-df0c2b511c8b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.078662] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15fe030ed7cb4e028a6d32214f7accf6 [ 900.082050] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 900.082050] env[62109]: value = "task-401516" [ 900.082050] env[62109]: _type = "Task" [ 900.082050] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.093042] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.107588] env[62109]: DEBUG nova.policy [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6349e1aff7d945a6a471b1f4e826b23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a866168186462d9d849072a1ff25f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 900.207049] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.207775] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 32739f7ffe6b4cbdb031ca4c3b4b7838 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.217543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32739f7ffe6b4cbdb031ca4c3b4b7838 [ 900.254515] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81013e2c-1037-4983-9c34-d6f6eee108a7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.263046] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22821df-8b94-4d5f-affa-63507da02987 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.294599] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2b7d92-3b22-42d9-8bfd-f75f0711ca1b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.301722] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d2fd25-8add-445f-af40-816eb5c5ae7e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.314596] env[62109]: DEBUG nova.compute.provider_tree [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.315094] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg e5dba396736247779aa3509695de1894 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.323020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5dba396736247779aa3509695de1894 [ 900.410541] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Successfully created port: 792b62d9-f0ba-485b-9131-66603f84de93 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.554255] env[62109]: DEBUG nova.compute.manager [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Received event network-changed-cd11637a-e069-4299-9bbb-23a2b33fa21c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 900.554444] env[62109]: DEBUG nova.compute.manager [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Refreshing instance network info cache due to event network-changed-cd11637a-e069-4299-9bbb-23a2b33fa21c. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 900.554631] env[62109]: DEBUG oslo_concurrency.lockutils [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] Acquiring lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.567237] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 900.569149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1f94ab92a8c548f085b73ec368ad2f0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.592179] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116844} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.592444] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.592630] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 900.592803] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 900.594379] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg c9c31fafd83e44329916d1b0061dd2a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.603619] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f94ab92a8c548f085b73ec368ad2f0b [ 900.660838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9c31fafd83e44329916d1b0061dd2a5 [ 900.710223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Releasing lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.710659] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 900.710854] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 900.711197] env[62109]: DEBUG oslo_concurrency.lockutils [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] Acquired lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.711377] env[62109]: DEBUG nova.network.neutron [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Refreshing network info cache for port cd11637a-e069-4299-9bbb-23a2b33fa21c {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.711816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] Expecting reply to msg 52d9846a23ce42c7ab76107251fc7ba5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.712684] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e939556-1237-41b3-b590-e3683582f83e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.719882] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52d9846a23ce42c7ab76107251fc7ba5 [ 900.722803] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff0144f-dd8f-4189-bc56-bb3b92cc6116 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.748428] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d851f6a6-07aa-4e64-a007-8a42a8ae9c42 could not be found. [ 900.748654] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 900.748830] env[62109]: INFO nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Took 0.04 seconds to destroy the instance on the hypervisor. [ 900.749106] env[62109]: DEBUG oslo.service.loopingcall [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.749329] env[62109]: DEBUG nova.compute.manager [-] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 900.749419] env[62109]: DEBUG nova.network.neutron [-] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 900.784596] env[62109]: DEBUG nova.network.neutron [-] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.818266] env[62109]: DEBUG nova.scheduler.client.report [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.820798] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 58867d869756455fb585b9ed788996fb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.835106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58867d869756455fb585b9ed788996fb [ 900.972281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b4ed6dc2da584a988291bbbe9f673051 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 900.980179] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4ed6dc2da584a988291bbbe9f673051 [ 901.080299] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6542a14ab67d4ecabd7497dd50456e51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.101476] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg e853d52adbca4433877ab3ad5222afe5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.114916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6542a14ab67d4ecabd7497dd50456e51 [ 901.120415] env[62109]: DEBUG nova.compute.manager [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Received event network-changed-792b62d9-f0ba-485b-9131-66603f84de93 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 901.120607] env[62109]: DEBUG nova.compute.manager [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] 
[instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Refreshing instance network info cache due to event network-changed-792b62d9-f0ba-485b-9131-66603f84de93. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 901.120817] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] Acquiring lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.120954] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] Acquired lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.121112] env[62109]: DEBUG nova.network.neutron [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Refreshing network info cache for port 792b62d9-f0ba-485b-9131-66603f84de93 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.121700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] Expecting reply to msg 0f82b573b48340a9a83c305c4c3d9d7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.127838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f82b573b48340a9a83c305c4c3d9d7b [ 901.145786] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e853d52adbca4433877ab3ad5222afe5 [ 901.232154] env[62109]: DEBUG nova.network.neutron [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.323741] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.767s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.324418] env[62109]: ERROR nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. 
[ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Traceback (most recent call last): [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.driver.spawn(context, instance, image_meta, [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] vm_ref = self.build_virtual_machine(instance, [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] vif_infos = vmwarevif.get_vif_info(self._session, [ 901.324418] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] for vif in network_info: [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return self._sync_wrapper(fn, *args, **kwargs) [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.wait() [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self[:] = self._gt.wait() [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return self._exit_event.wait() [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] current.throw(*self._exc) [ 901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
901.324722] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] result = function(*args, **kwargs) [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] return func(*args, **kwargs) [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise e [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] nwinfo = self.network_api.allocate_for_instance( [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] created_port_ids = self._update_ports_for_instance( [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] with excutils.save_and_reraise_exception(): [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] self.force_reraise() [ 901.325042] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise self.value [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] updated_port = self._update_port( [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] _ensure_no_port_binding_failure(port) [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] raise exception.PortBindingFailed(port_id=port['id']) [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] nova.exception.PortBindingFailed: Binding failed for 
port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. [ 901.325359] env[62109]: ERROR nova.compute.manager [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] [ 901.325359] env[62109]: DEBUG nova.compute.utils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 901.327280] env[62109]: ERROR nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. [ 901.327280] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 901.327280] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 901.327280] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 901.327280] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 901.327280] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 901.327280] env[62109]: ERROR nova.compute.manager raise self.value [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 901.327280] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 901.327280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 901.327280] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 901.327704] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 901.327704] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 901.327704] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. 
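The tracebacks above bottom out in nova/network/neutron.py, where _ensure_no_port_binding_failure() raises nova.exception.PortBindingFailed once Neutron hands back the updated port; the same failure is dumped once more below by the network-allocation greenthread. As a rough, self-contained sketch of that check (the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions about how Neutron flags a failed binding, and the exception class here is a local stand-in for the real nova.exception.PortBindingFailed):

# Minimal sketch, not Nova's actual module: the kind of check behind
# _ensure_no_port_binding_failure() at nova/network/neutron.py:294 above.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron reports on a failed binding

class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's VIF binding as failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# The port from the log, reduced to the two keys this check cares about.
port = {'id': '792b62d9-f0ba-485b-9131-66603f84de93',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # prints the same message seen in the ERROR records above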
[ 901.327704] env[62109]: ERROR nova.compute.manager [ 901.327704] env[62109]: Traceback (most recent call last): [ 901.327704] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 901.327704] env[62109]: listener.cb(fileno) [ 901.327704] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 901.327704] env[62109]: result = function(*args, **kwargs) [ 901.327704] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 901.327704] env[62109]: return func(*args, **kwargs) [ 901.327704] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 901.327704] env[62109]: raise e [ 901.327704] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 901.327704] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 901.327704] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 901.327704] env[62109]: created_port_ids = self._update_ports_for_instance( [ 901.327704] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 901.327704] env[62109]: with excutils.save_and_reraise_exception(): [ 901.327704] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 901.327704] env[62109]: self.force_reraise() [ 901.327704] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 901.327704] env[62109]: raise self.value [ 901.327704] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 901.327704] env[62109]: updated_port = self._update_port( [ 901.327704] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 901.327704] env[62109]: _ensure_no_port_binding_failure(port) [ 901.327704] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 901.327704] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 901.328449] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. 
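Both dumps of this failure also pass through oslo_utils.excutils.save_and_reraise_exception(), which is why the __exit__, force_reraise and "raise self.value" frames appear in every traceback. A small usage sketch of that context manager, assuming oslo.utils is installed; the update_port/rollback helpers below are made up purely for illustration:

# Hedged sketch of the oslo.utils pattern visible in the frames above.
from oslo_utils import excutils

def update_port(port_id):
    # Stand-in for the Neutron port update that fails in the log.
    raise RuntimeError(f"Binding failed for port {port_id}")

def rollback(port_id):
    print(f"cleaning up after failed update of port {port_id}")

def update_ports(port_ids):
    for port_id in port_ids:
        try:
            update_port(port_id)
        except Exception:
            # Run cleanup, then re-raise the *original* exception when the
            # context manager exits -- the same force_reraise()/raise self.value
            # frames that show up in the tracebacks above.
            with excutils.save_and_reraise_exception():
                rollback(port_id)

if __name__ == "__main__":
    try:
        update_ports(['792b62d9-f0ba-485b-9131-66603f84de93'])
    except RuntimeError as exc:
        print("re-raised:", exc)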
[ 901.328449] env[62109]: Removing descriptor: 16 [ 901.328449] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.670s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.330499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 553ca6e799c3419bbbcfe215e0087a12 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.331762] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Build of instance f453b695-8abd-44fa-8468-75c6aaeec19a was re-scheduled: Binding failed for port b79d0813-93ae-42da-a060-e3a3b0e18d63, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 901.332216] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 901.332438] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquiring lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.332582] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Acquired lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.332736] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.333096] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 9b8ea417a79a4b7b8d9bfa3bf786b072 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.339124] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b8ea417a79a4b7b8d9bfa3bf786b072 [ 901.345650] env[62109]: DEBUG nova.network.neutron [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Updating instance_info_cache with network_info: [] {{(pid=62109) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.346137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] Expecting reply to msg 6f040e1382c5404a8a99fb62084e48f5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.368750] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f040e1382c5404a8a99fb62084e48f5 [ 901.382425] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 553ca6e799c3419bbbcfe215e0087a12 [ 901.474076] env[62109]: DEBUG nova.network.neutron [-] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.475000] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 56bb72684611419d932f0423f3746792 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.482960] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56bb72684611419d932f0423f3746792 [ 901.576715] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 901.600885] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.601163] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.601359] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.601578] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
901.601757] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.601962] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.602206] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.602401] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.602609] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.602808] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.603012] env[62109]: DEBUG nova.virt.hardware [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.605987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101b0fca-7f9f-4d5f-8d09-505b4f290257 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.614646] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1d2edb-da76-4f6a-b2b0-2a30af66f4d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.631107] env[62109]: ERROR nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. 
[ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Traceback (most recent call last): [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] yield resources [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.driver.spawn(context, instance, image_meta, [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] vm_ref = self.build_virtual_machine(instance, [ 901.631107] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] vif_infos = vmwarevif.get_vif_info(self._session, [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] for vif in network_info: [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return self._sync_wrapper(fn, *args, **kwargs) [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.wait() [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self[:] = self._gt.wait() [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return self._exit_event.wait() [ 901.631461] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 901.631461] env[62109]: ERROR 
nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] current.throw(*self._exc) [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] result = function(*args, **kwargs) [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return func(*args, **kwargs) [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise e [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] nwinfo = self.network_api.allocate_for_instance( [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] created_port_ids = self._update_ports_for_instance( [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] with excutils.save_and_reraise_exception(): [ 901.631807] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.force_reraise() [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise self.value [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] updated_port = self._update_port( [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] _ensure_no_port_binding_failure(port) [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise exception.PortBindingFailed(port_id=port['id']) [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. [ 901.632281] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] [ 901.632281] env[62109]: INFO nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Terminating instance [ 901.635264] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.635529] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.635721] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.635952] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.636241] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.636447] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.636693] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 
tempest-ServersAdmin275Test-2021021546-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.636891] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.637094] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.637294] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.637503] env[62109]: DEBUG nova.virt.hardware [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.638281] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8b3a54-986a-430c-bdff-3f8f73ec1847 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.640805] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.645941] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2bbdcd-0a2a-4153-bcd1-1833deb60ba8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.658530] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.663933] env[62109]: DEBUG oslo.service.loopingcall [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.664665] env[62109]: DEBUG nova.network.neutron [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.666223] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 901.666470] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dadfc574-37cb-4f1a-943c-9a0899e29acf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.683070] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.683070] env[62109]: value = "task-401517" [ 901.683070] env[62109]: _type = "Task" [ 901.683070] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.690159] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401517, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.747876] env[62109]: DEBUG nova.network.neutron [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.748585] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] Expecting reply to msg 089e3dc781e042fd9479f78282499c48 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.757776] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 089e3dc781e042fd9479f78282499c48 [ 901.849297] env[62109]: DEBUG oslo_concurrency.lockutils [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] Releasing lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.849819] env[62109]: DEBUG nova.compute.manager [req-844b94d9-b506-4d83-a701-58555f4dff87 req-e8af820a-5474-4491-929d-1bc25b5cc53c service nova] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Received event network-vif-deleted-cd11637a-e069-4299-9bbb-23a2b33fa21c {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 901.857028] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.961549] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.961549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 66b9dc657b234b7d99fbb1a1fb7dd62a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 901.972065] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66b9dc657b234b7d99fbb1a1fb7dd62a [ 901.979076] env[62109]: INFO nova.compute.manager [-] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Took 1.23 seconds to deallocate network for instance. [ 901.984026] env[62109]: DEBUG nova.compute.claims [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 901.984026] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.062598] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad1db7b-566b-4a19-af77-a5858ca42891 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.070343] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defed159-0acd-4c28-aafb-9df80bc11622 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.100472] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b3a5a9-e68f-4a89-ab4f-ad4264bea6f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.107682] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18b1084-9058-4f6d-8fb5-3321b1c2585e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.121485] env[62109]: DEBUG nova.compute.provider_tree [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.121970] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 
1169d89ba81943af85e64c3e4c70f803 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.129581] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1169d89ba81943af85e64c3e4c70f803 [ 902.192756] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401517, 'name': CreateVM_Task, 'duration_secs': 0.256997} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.192920] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 902.193326] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.193487] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.193799] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.194042] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e77fe8-3056-4707-b5e2-8aa8552a4df4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.198547] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 902.198547] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]529a2097-5504-29ea-4acc-2c00aa204e0f" [ 902.198547] env[62109]: _type = "Task" [ 902.198547] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.205643] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]529a2097-5504-29ea-4acc-2c00aa204e0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.251570] env[62109]: DEBUG oslo_concurrency.lockutils [req-2f180a72-b520-4105-8bb1-cb2b6806db2d req-3a87aaaa-dff0-40c8-82bd-dac580d942d5 service nova] Releasing lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.251976] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.252192] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 902.252639] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1c4d233f8dbd447e94c0ac8a2ae43b6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.261192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c4d233f8dbd447e94c0ac8a2ae43b6d [ 902.462685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Releasing lock "refresh_cache-f453b695-8abd-44fa-8468-75c6aaeec19a" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.462940] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 902.463124] env[62109]: DEBUG nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 902.463291] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 902.476606] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 902.477252] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg db72df002c8c4150b0e7f991efe5845e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.485400] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db72df002c8c4150b0e7f991efe5845e [ 902.630972] env[62109]: DEBUG nova.scheduler.client.report [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.633426] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5f9960aa19274a01beddec2f23f9f4cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.644541] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f9960aa19274a01beddec2f23f9f4cd [ 902.709527] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]529a2097-5504-29ea-4acc-2c00aa204e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.008807} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.709815] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.710045] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.710323] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.710433] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.710596] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.710844] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1023cb94-f55e-427e-9e57-7e16cf7bc9fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.718489] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.718655] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 902.719318] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c11a796e-6b3b-4031-b951-572477f5a5fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.723775] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 902.723775] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5292911f-b2d2-db78-008b-b33f8d51ff9c" [ 902.723775] env[62109]: _type = "Task" [ 902.723775] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.730767] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5292911f-b2d2-db78-008b-b33f8d51ff9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.776648] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 902.855777] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.856342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 9484fafaf8a243f889a64225a625cf5d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.864243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9484fafaf8a243f889a64225a625cf5d [ 902.979544] env[62109]: DEBUG nova.network.neutron [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.980085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 001a11ffe22a4e7bbe383ec0ecb60607 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 902.988310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 001a11ffe22a4e7bbe383ec0ecb60607 [ 903.142416] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 
tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.813s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.142416] env[62109]: ERROR nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Traceback (most recent call last): [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.driver.spawn(context, instance, image_meta, [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 903.142416] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] vm_ref = self.build_virtual_machine(instance, [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] vif_infos = vmwarevif.get_vif_info(self._session, [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] for vif in network_info: [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return self._sync_wrapper(fn, *args, **kwargs) [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.wait() [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self[:] = self._gt.wait() [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return self._exit_event.wait() [ 903.142767] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] current.throw(*self._exc) [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] result = function(*args, **kwargs) [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] return func(*args, **kwargs) [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise e [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] nwinfo = self.network_api.allocate_for_instance( [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] created_port_ids = self._update_ports_for_instance( [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 903.143135] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] with excutils.save_and_reraise_exception(): [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] self.force_reraise() [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise self.value [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] updated_port = self._update_port( [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: 
e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] _ensure_no_port_binding_failure(port) [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] raise exception.PortBindingFailed(port_id=port['id']) [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] nova.exception.PortBindingFailed: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. [ 903.143509] env[62109]: ERROR nova.compute.manager [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] [ 903.143856] env[62109]: DEBUG nova.compute.utils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 903.143856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.724s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.143856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.143856] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 903.143856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.581s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.144933] env[62109]: INFO nova.compute.claims [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.146706] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 5fdefb42e02e4bfeaedf3bde8b4527a3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.148937] env[62109]: DEBUG nova.compute.manager [None 
req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Build of instance e2e09174-6ba1-44ad-ba3e-cdcae5a2d698 was re-scheduled: Binding failed for port 492e9847-c7bd-424b-b9ba-eed84aef6eb0, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 903.149348] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 903.149570] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.149715] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.149869] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 903.150292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 1874fa3b40234f1cbbf091248892f4e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.151484] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e269e50-3e87-44d2-8e59-602c035f2d98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.154726] env[62109]: DEBUG nova.compute.manager [req-fcd7dce0-0f5c-4322-b689-3c69641f344b req-2d2cf03f-3c28-4d65-b2c2-8ee4478b4b9d service nova] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Received event network-vif-deleted-792b62d9-f0ba-485b-9131-66603f84de93 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 903.156214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1874fa3b40234f1cbbf091248892f4e2 [ 903.161517] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8aeb75-414b-4da1-8c89-9ee12ce74592 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.176129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5ef9b4-1fb3-4e22-bb79-b5857c66b3be {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.178680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fdefb42e02e4bfeaedf3bde8b4527a3 [ 903.182819] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1cdd5c-0a29-476e-903a-21913c787fa3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.215284] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181488MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 903.215442] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.232387] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5292911f-b2d2-db78-008b-b33f8d51ff9c, 'name': SearchDatastore_Task, 'duration_secs': 0.007522} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.233105] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe1fb17f-3d60-4bdb-8b43-777ac95c8e0f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.237705] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 903.237705] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52996d62-c7ee-49d9-79e9-cffb3a9b7bd6" [ 903.237705] env[62109]: _type = "Task" [ 903.237705] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.245177] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52996d62-c7ee-49d9-79e9-cffb3a9b7bd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.358575] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.359247] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 903.359613] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 903.360100] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cd2a41a-f41a-4ecf-b1f3-100361ca9c7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.369869] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c545ae-d81a-44ae-af0e-2e8f553d3ed0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.394402] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 55da10ab-e116-4ead-90ff-c82fffb2dcc6 could not be found. [ 903.394594] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 903.394772] env[62109]: INFO nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 903.395013] env[62109]: DEBUG oslo.service.loopingcall [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.395244] env[62109]: DEBUG nova.compute.manager [-] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 903.395340] env[62109]: DEBUG nova.network.neutron [-] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 903.408258] env[62109]: DEBUG nova.network.neutron [-] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 903.408730] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8215637a7e634d84b1399b95c8ff462a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.416392] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8215637a7e634d84b1399b95c8ff462a [ 903.483111] env[62109]: INFO nova.compute.manager [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] [instance: f453b695-8abd-44fa-8468-75c6aaeec19a] Took 1.02 seconds to deallocate network for instance. [ 903.485363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg f42ea2279d5944dc9201d5cbe886e47f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.520628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f42ea2279d5944dc9201d5cbe886e47f [ 903.657486] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 226e99b8d7da4815a69190c3d6c771a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.666899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 226e99b8d7da4815a69190c3d6c771a7 [ 903.671733] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 903.746777] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52996d62-c7ee-49d9-79e9-cffb3a9b7bd6, 'name': SearchDatastore_Task, 'duration_secs': 0.009322} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.747022] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.747277] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 903.747517] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-737ef594-5168-449b-9f41-df9fa50d639e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.750315] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.750748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 08ca8a952e13432d882e9b5e8d47b9c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.754184] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 903.754184] env[62109]: value = "task-401518" [ 903.754184] env[62109]: _type = "Task" [ 903.754184] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.759166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ca8a952e13432d882e9b5e8d47b9c2 [ 903.762455] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401518, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.911251] env[62109]: DEBUG nova.network.neutron [-] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.911790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 83a25e36ce1d40e988cff0e9a6eee9f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 903.922128] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83a25e36ce1d40e988cff0e9a6eee9f3 [ 903.991081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg 88af3be7953e4af99e9be526092549a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.026632] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88af3be7953e4af99e9be526092549a5 [ 904.253011] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.253338] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 904.253581] env[62109]: DEBUG nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 904.253891] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.267244] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433167} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.267708] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 904.268116] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.268442] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ecd6b82-c8ba-44ce-89d4-9bb125bf9225 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.275254] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 904.275254] env[62109]: value = "task-401519" [ 904.275254] env[62109]: _type = "Task" [ 904.275254] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.276038] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 904.276623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5f1cedcb27c648ab8c7d8aa869d537c2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.288062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f1cedcb27c648ab8c7d8aa869d537c2 [ 904.288551] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401519, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.338856] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780fdb45-20db-4221-a5d8-9c3899ef0004 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.346739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b1d54-77ed-45c8-939f-b2c870547bd1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.377806] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7b0e44-a5b1-496f-9eda-e31e93f2280e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.385043] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b227ed1b-cdf5-4252-b0d5-9ca4f334489a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.398908] env[62109]: DEBUG nova.compute.provider_tree [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.399464] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 454317bc33e84578950f7e0cf3890167 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.407139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 454317bc33e84578950f7e0cf3890167 [ 904.414379] env[62109]: INFO nova.compute.manager [-] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Took 1.02 seconds to deallocate network for instance. 
[ 904.416520] env[62109]: DEBUG nova.compute.claims [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 904.416764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.511217] env[62109]: INFO nova.scheduler.client.report [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Deleted allocations for instance f453b695-8abd-44fa-8468-75c6aaeec19a [ 904.517267] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Expecting reply to msg c2749bd14db140458e0621e550137d0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.532552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2749bd14db140458e0621e550137d0e [ 904.779384] env[62109]: DEBUG nova.network.neutron [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.779660] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2422f9ab6c884a549648c43cf11dac36 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.787975] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2422f9ab6c884a549648c43cf11dac36 [ 904.792044] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067825} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.792291] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.793048] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdee744-58b3-4fad-91fb-24d542e15d5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.822389] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.823013] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9920759e-85ec-4cbb-9bb3-e4e4e93c05b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.853102] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 904.853102] env[62109]: value = "task-401520" [ 904.853102] env[62109]: _type = "Task" [ 904.853102] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.860353] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401520, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.901762] env[62109]: DEBUG nova.scheduler.client.report [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.904177] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 8a3a85e9929a4061a6db02b98d94a2dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 904.917429] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a3a85e9929a4061a6db02b98d94a2dc [ 905.023291] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8da93c1e-ac61-4029-a339-d79994345f76 tempest-ServersTestBootFromVolume-235730665 tempest-ServersTestBootFromVolume-235730665-project-member] Lock "f453b695-8abd-44fa-8468-75c6aaeec19a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.088s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.023291] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg b3794935f19545f8afd7b318a45a3995 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.031854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3794935f19545f8afd7b318a45a3995 [ 905.287154] env[62109]: INFO nova.compute.manager [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: e2e09174-6ba1-44ad-ba3e-cdcae5a2d698] Took 1.03 seconds to deallocate network for instance. [ 905.288899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9013e53eaecf4d2c9be2f190ab62f56f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.320382] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9013e53eaecf4d2c9be2f190ab62f56f [ 905.363522] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401520, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.406972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.407685] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 905.409537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 9a4be57046fc4e8f992ed4afa83f1fb6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.410600] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.459s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.412586] env[62109]: INFO nova.compute.claims [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.414515] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 957dbd078c464a778987a414b2d60cc5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.441594] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a4be57046fc4e8f992ed4afa83f1fb6 [ 905.448916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 957dbd078c464a778987a414b2d60cc5 [ 905.524273] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 905.525955] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 52601f9694d54008bd893f029c82347e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.563444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52601f9694d54008bd893f029c82347e [ 905.793677] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2574e4e80c0544789230ba0b7e2bb20e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.824523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2574e4e80c0544789230ba0b7e2bb20e [ 905.862968] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401520, 'name': ReconfigVM_Task, 'duration_secs': 0.523188} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.863413] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585/309a7bae-82f5-4b9e-ac86-e0f1803f2585.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.864144] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48e04949-a0b1-4ef0-8763-66b49e1d89a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.869946] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 905.869946] env[62109]: value = "task-401522" [ 905.869946] env[62109]: _type = "Task" [ 905.869946] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.877273] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401522, 'name': Rename_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.917461] env[62109]: DEBUG nova.compute.utils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.918548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 5abe422b55974d98b7f4fb7c93d39424 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.920821] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f23b557f6310498f8dacd3499fc12bcf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 905.922010] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 905.922301] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.928633] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f23b557f6310498f8dacd3499fc12bcf [ 905.932868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5abe422b55974d98b7f4fb7c93d39424 [ 905.978379] env[62109]: DEBUG nova.policy [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23f9946393284244aca0c29201c37fa4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21cdf8675fb347c2874d912dcb8ac002', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 906.048311] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.319791] env[62109]: INFO nova.scheduler.client.report [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance e2e09174-6ba1-44ad-ba3e-cdcae5a2d698 [ 906.325873] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg b84fbe23750648c0b8ca176444f17e89 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 906.330575] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Successfully created port: c42dbb9c-5fa1-493e-8235-912a6dd1e291 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.339686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b84fbe23750648c0b8ca176444f17e89 [ 906.380190] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401522, 'name': Rename_Task, 'duration_secs': 0.130352} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.380497] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 906.380748] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1155317-9bd5-43d9-9706-498accfa15df {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.386969] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Waiting for the task: (returnval){ [ 906.386969] env[62109]: value = "task-401523" [ 906.386969] env[62109]: _type = "Task" [ 906.386969] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.398201] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401523, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.425328] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 906.427269] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 624b8d001b474666bb2333fa72b2fed1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 906.486550] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 624b8d001b474666bb2333fa72b2fed1 [ 906.617434] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377ae53d-eabb-41ef-a2b6-586396cf4477 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.625603] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2653f28-9417-49a6-8dd9-e9d5d6667976 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.663015] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcd8c04-d9ae-4cee-9a17-f778e783fbf8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.674977] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3170b3fa-c0a4-4c42-b700-99267fb13b27 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.688166] env[62109]: DEBUG nova.compute.provider_tree [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.688691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ecca84c076d8401eb14406ea8432426d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 906.696278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecca84c076d8401eb14406ea8432426d [ 906.828689] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97e001d4-24dc-41bf-acc1-538381c890c3 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "e2e09174-6ba1-44ad-ba3e-cdcae5a2d698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.493s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.829475] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg d0133c2248844770ac8b79cb6a49371b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 906.839569] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0133c2248844770ac8b79cb6a49371b [ 906.897352] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401523, 
'name': PowerOnVM_Task} progress is 66%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.947913] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 480ba0f421584482a84e6012bcff63ae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 906.993039] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 480ba0f421584482a84e6012bcff63ae [ 907.192858] env[62109]: DEBUG nova.scheduler.client.report [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.195746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg bca8fb62af3244218ab55a39e287e5d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.226871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bca8fb62af3244218ab55a39e287e5d8 [ 907.334793] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 907.334793] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 2968a6461b2845fd8ffae2fec73c2eb0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.398453] env[62109]: DEBUG oslo_vmware.api [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Task: {'id': task-401523, 'name': PowerOnVM_Task, 'duration_secs': 0.613107} completed successfully. 
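The inventory report above lists totals, reserved amounts and allocation ratios per resource class for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. Placement generally treats the schedulable capacity of each class as (total - reserved) * allocation_ratio; the following is a small worked example using the numbers from the log, shown as plain arithmetic rather than placement code:

# Inventory figures copied from the report above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Usual placement rule: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}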
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.399188] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2968a6461b2845fd8ffae2fec73c2eb0 [ 907.400043] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 907.400400] env[62109]: DEBUG nova.compute.manager [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 907.401276] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a51311e-2d8a-4e9e-ab88-bbf5f7e039f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.410189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg 7469099df45c4e51a68383e96d9115a4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.443935] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7469099df45c4e51a68383e96d9115a4 [ 907.452817] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 907.480289] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.480611] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.480699] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.481731] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.481731] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.481731] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.481731] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.481731] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.482063] 
env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.482063] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.482063] env[62109]: DEBUG nova.virt.hardware [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.483247] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156d5edd-f1d3-4169-9fa8-1bd7723d1599 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.491109] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483b661e-0bce-48bf-a571-f8686e817fe1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.511800] env[62109]: DEBUG nova.compute.manager [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Received event network-changed-c42dbb9c-5fa1-493e-8235-912a6dd1e291 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 907.511800] env[62109]: DEBUG nova.compute.manager [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Refreshing instance network info cache due to event network-changed-c42dbb9c-5fa1-493e-8235-912a6dd1e291. 
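The nova.virt.hardware entries above compute CPU topologies for the 1-vCPU m1.nano flavor with no flavor or image constraints, ending with the single candidate cores=1,sockets=1,threads=1. Below is a simplified stand-in for that enumeration (not Nova's actual _get_possible_cpu_topologies, which also honours flavor/image preferences and NUMA limits):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the m1.nano entry above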
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 907.511800] env[62109]: DEBUG oslo_concurrency.lockutils [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] Acquiring lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.511955] env[62109]: DEBUG oslo_concurrency.lockutils [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] Acquired lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.512064] env[62109]: DEBUG nova.network.neutron [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Refreshing network info cache for port c42dbb9c-5fa1-493e-8235-912a6dd1e291 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 907.513010] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] Expecting reply to msg 9633428e97bf421387d80e5aec9e56d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.519180] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9633428e97bf421387d80e5aec9e56d4 [ 907.685052] env[62109]: ERROR nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. 
[ 907.685052] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 907.685052] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 907.685052] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 907.685052] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.685052] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.685052] env[62109]: ERROR nova.compute.manager raise self.value [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 907.685052] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 907.685052] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 907.685052] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 907.685496] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 907.685496] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 907.685496] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. 
[ 907.685496] env[62109]: ERROR nova.compute.manager [ 907.685496] env[62109]: Traceback (most recent call last): [ 907.685496] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 907.685496] env[62109]: listener.cb(fileno) [ 907.685496] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 907.685496] env[62109]: result = function(*args, **kwargs) [ 907.685496] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 907.685496] env[62109]: return func(*args, **kwargs) [ 907.685496] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 907.685496] env[62109]: raise e [ 907.685496] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 907.685496] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 907.685496] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 907.685496] env[62109]: created_port_ids = self._update_ports_for_instance( [ 907.685496] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 907.685496] env[62109]: with excutils.save_and_reraise_exception(): [ 907.685496] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.685496] env[62109]: self.force_reraise() [ 907.685496] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.685496] env[62109]: raise self.value [ 907.685496] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 907.685496] env[62109]: updated_port = self._update_port( [ 907.685496] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 907.685496] env[62109]: _ensure_no_port_binding_failure(port) [ 907.685496] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 907.685496] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 907.686387] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. [ 907.686387] env[62109]: Removing descriptor: 19 [ 907.686387] env[62109]: ERROR nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. 
[ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] Traceback (most recent call last): [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] yield resources [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.driver.spawn(context, instance, image_meta, [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self._vmops.spawn(context, instance, image_meta, injected_files, [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 907.686387] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] vm_ref = self.build_virtual_machine(instance, [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] vif_infos = vmwarevif.get_vif_info(self._session, [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] for vif in network_info: [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self._sync_wrapper(fn, *args, **kwargs) [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.wait() [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self[:] = self._gt.wait() [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self._exit_event.wait() [ 907.686712] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 907.687126] env[62109]: ERROR 
nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] result = hub.switch() [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self.greenlet.switch() [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] result = function(*args, **kwargs) [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return func(*args, **kwargs) [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise e [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] nwinfo = self.network_api.allocate_for_instance( [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 907.687126] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] created_port_ids = self._update_ports_for_instance( [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] with excutils.save_and_reraise_exception(): [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.force_reraise() [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise self.value [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] updated_port = self._update_port( [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 907.687472] 
env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] _ensure_no_port_binding_failure(port) [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 907.687472] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise exception.PortBindingFailed(port_id=port['id']) [ 907.687795] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. [ 907.687795] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] [ 907.687795] env[62109]: INFO nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Terminating instance [ 907.688830] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.705735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.706349] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Start building networks asynchronously for instance. 
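Both tracebacks above terminate in _ensure_no_port_binding_failure(port) raising PortBindingFailed once Neutron reports that it could not bind the port. The following is a hedged reconstruction of that check, consistent with the neutron.py frames in the traceback; the exception class here is a local stand-in, not nova.exception:

class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron flags an unbindable port via binding:vif_type = 'binding_failed';
    # this mirrors the nova/network/neutron.py:294 frame shown above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': 'c42dbb9c-5fa1-493e-8235-912a6dd1e291',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)   # same message as the ERROR entries above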
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 907.708360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1041c142a03647cabb8c39e2a21aed9d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.709836] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.682s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.712041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg ebeb0bfeb16b432fabfc7a5ac4280a51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 907.746958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1041c142a03647cabb8c39e2a21aed9d [ 907.748084] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebeb0bfeb16b432fabfc7a5ac4280a51 [ 907.853578] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.918675] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.036731] env[62109]: DEBUG nova.network.neutron [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance cache missing network info. 
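The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" DEBUG lines threaded through this section come from oslo.concurrency's lockutils wrapper around named locks such as "compute_resources". A minimal sketch of that usage pattern follows; it is illustrative only, since Nova's resource tracker wires this up through its own decorator (and requests a fair lock so waiters are served in arrival order):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Everything inside runs with the named lock held; lockutils emits the
    # "Acquiring lock / acquired ... waited / released ... held" DEBUG lines.
    return 'claimed %s' % instance_uuid

print(instance_claim('87dff872-a469-465f-9c74-4524a2eab013'))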
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.127500] env[62109]: DEBUG nova.network.neutron [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.128155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] Expecting reply to msg 7ca4eaa5b86447b1a96bfbbc1d391158 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.135402] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ca4eaa5b86447b1a96bfbbc1d391158 [ 908.215337] env[62109]: DEBUG nova.compute.utils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.216168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 52656fb9b2ed4e769073e2669f1d4365 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.219907] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 908.220125] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 908.235224] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52656fb9b2ed4e769073e2669f1d4365 [ 908.296867] env[62109]: DEBUG nova.policy [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 908.457318] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19475289-1e9f-4a03-87c7-2161f388eddc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.472195] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6b3955-b7a8-44a7-85f6-f34c9d6b5121 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.507105] 
env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18e4c4d-6512-42c3-8e3c-9128105e8c07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.511892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg b7f3644a32854c5a8acfe9eb310f9f24 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.519184] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc50b5a-6ac4-4d99-8faf-85bc03e885bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.524997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7f3644a32854c5a8acfe9eb310f9f24 [ 908.535034] env[62109]: DEBUG nova.compute.provider_tree [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.535634] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 9d34450718844a44bfe13f9a40bd7469 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.548912] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d34450718844a44bfe13f9a40bd7469 [ 908.593875] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "27fed863-1e27-4258-8b43-b8cd23e3c1c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.594065] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "27fed863-1e27-4258-8b43-b8cd23e3c1c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.627858] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Successfully created port: 711f1545-3683-4e94-8275-4892d964343b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.630367] env[62109]: DEBUG oslo_concurrency.lockutils [req-e1d54210-14c5-4eab-9762-11487dbbe984 req-47ce248e-7e74-4b7c-bb3d-788674be369f service nova] Releasing lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.630672] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 
tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.630853] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.631264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 9ea450f84eb643cab8bcaa660b6edc40 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.644529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ea450f84eb643cab8bcaa660b6edc40 [ 908.723365] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 908.725163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a9c3050c6b634831a3e966d05945765f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 908.756132] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c3050c6b634831a3e966d05945765f [ 909.014448] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.014724] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.014955] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.015141] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.015314] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.017941] env[62109]: INFO nova.compute.manager [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Terminating instance [ 909.019764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "refresh_cache-309a7bae-82f5-4b9e-ac86-e0f1803f2585" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.019925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquired lock "refresh_cache-309a7bae-82f5-4b9e-ac86-e0f1803f2585" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.020113] env[62109]: DEBUG nova.network.neutron [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 909.020562] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg b8b3989d8877409e93c87d4128414fde in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.027013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8b3989d8877409e93c87d4128414fde [ 909.038050] env[62109]: DEBUG nova.scheduler.client.report [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.040491] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg ca2f8d7071b74385aabb91de0c32b7e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 
909.055869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca2f8d7071b74385aabb91de0c32b7e5 [ 909.160162] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.229952] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b572a7adb04449c9b658d96256b1917d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.272791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b572a7adb04449c9b658d96256b1917d [ 909.297206] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.297742] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 65652df100be41c6afea2a07e78afb86 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.310138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65652df100be41c6afea2a07e78afb86 [ 909.542758] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.833s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.543709] env[62109]: ERROR nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. 
[ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Traceback (most recent call last): [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.driver.spawn(context, instance, image_meta, [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] vm_ref = self.build_virtual_machine(instance, [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] vif_infos = vmwarevif.get_vif_info(self._session, [ 909.543709] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] for vif in network_info: [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return self._sync_wrapper(fn, *args, **kwargs) [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.wait() [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self[:] = self._gt.wait() [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return self._exit_event.wait() [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] current.throw(*self._exc) [ 909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
909.544101] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] result = function(*args, **kwargs) [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] return func(*args, **kwargs) [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise e [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] nwinfo = self.network_api.allocate_for_instance( [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] created_port_ids = self._update_ports_for_instance( [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] with excutils.save_and_reraise_exception(): [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] self.force_reraise() [ 909.544495] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise self.value [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] updated_port = self._update_port( [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] _ensure_no_port_binding_failure(port) [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] raise exception.PortBindingFailed(port_id=port['id']) [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] nova.exception.PortBindingFailed: Binding failed for 
port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. [ 909.544859] env[62109]: ERROR nova.compute.manager [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] [ 909.544859] env[62109]: DEBUG nova.compute.utils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 909.546330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.976s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.552920] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 14d2b821552d40b2948f84d8ede778f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.552920] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Build of instance 3f99ec88-f05f-4583-b08b-d40fb37e275e was re-scheduled: Binding failed for port 2b78f53d-d85a-41f3-91bb-f8df87915fcc, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 909.554506] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 909.554506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquiring lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.554506] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Acquired lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.554506] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 909.555444] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg fbbd330b3ed14dc58949c485b6577f79 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.558997] env[62109]: DEBUG nova.compute.manager [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Received event network-vif-deleted-c42dbb9c-5fa1-493e-8235-912a6dd1e291 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 909.559411] env[62109]: DEBUG nova.compute.manager [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Received event network-changed-711f1545-3683-4e94-8275-4892d964343b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 909.559642] env[62109]: DEBUG nova.compute.manager [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Refreshing instance network info cache due to event network-changed-711f1545-3683-4e94-8275-4892d964343b. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 909.559882] env[62109]: DEBUG oslo_concurrency.lockutils [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] Acquiring lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.560206] env[62109]: DEBUG oslo_concurrency.lockutils [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] Acquired lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.560424] env[62109]: DEBUG nova.network.neutron [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Refreshing network info cache for port 711f1545-3683-4e94-8275-4892d964343b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 909.560830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] Expecting reply to msg be8c29ef63d6497dae2344f798aec052 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.567537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbbd330b3ed14dc58949c485b6577f79 [ 909.579326] env[62109]: DEBUG nova.network.neutron [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.584794] env[62109]: ERROR nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
[ 909.584794] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.584794] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 909.584794] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 909.584794] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.584794] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.584794] env[62109]: ERROR nova.compute.manager raise self.value [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 909.584794] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 909.584794] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.584794] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 909.585280] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 909.585280] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 909.585280] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
[ 909.585280] env[62109]: ERROR nova.compute.manager [ 909.585280] env[62109]: Traceback (most recent call last): [ 909.585280] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 909.585280] env[62109]: listener.cb(fileno) [ 909.585280] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 909.585280] env[62109]: result = function(*args, **kwargs) [ 909.585280] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 909.585280] env[62109]: return func(*args, **kwargs) [ 909.585280] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 909.585280] env[62109]: raise e [ 909.585280] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.585280] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 909.585280] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 909.585280] env[62109]: created_port_ids = self._update_ports_for_instance( [ 909.585280] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 909.585280] env[62109]: with excutils.save_and_reraise_exception(): [ 909.585280] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.585280] env[62109]: self.force_reraise() [ 909.585280] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.585280] env[62109]: raise self.value [ 909.585280] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 909.585280] env[62109]: updated_port = self._update_port( [ 909.585280] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.585280] env[62109]: _ensure_no_port_binding_failure(port) [ 909.585280] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 909.585280] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 909.586025] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
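Editor's note: the two tracebacks above both end in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure() after the error has passed through oslo_utils' save_and_reraise_exception() context manager (the force_reraise()/"raise self.value" frames). Below is a minimal, self-contained sketch of that pattern, assuming oslo.utils is installed; the helper bodies are simplified stand-ins for illustration, not the actual Nova source.

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Assumption for this sketch: Neutron reports a failed binding by
        # setting binding:vif_type to 'binding_failed'; the check turns that
        # into the exception seen in the tracebacks above.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_port(port):
        # save_and_reraise_exception() lets __exit__ run logging/cleanup and
        # then re-raises the original exception unchanged, which is why
        # force_reraise() and "raise self.value" appear in the trace.
        with excutils.save_and_reraise_exception():
            _ensure_no_port_binding_failure(port)
        return port


    if __name__ == '__main__':
        try:
            _update_port({'id': '711f1545-3683-4e94-8275-4892d964343b',
                          'binding:vif_type': 'binding_failed'})
        except PortBindingFailed as exc:
            print(exc)

Run as-is, the sketch prints the same "Binding failed for port ..." message that the compute manager logs before re-scheduling the build.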
[ 909.586025] env[62109]: Removing descriptor: 19 [ 909.587657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be8c29ef63d6497dae2344f798aec052 [ 909.615181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14d2b821552d40b2948f84d8ede778f9 [ 909.721599] env[62109]: DEBUG nova.network.neutron [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.722137] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 350ad91d83cd4ca3b2c36bc2dd03ce32 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.732480] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 350ad91d83cd4ca3b2c36bc2dd03ce32 [ 909.733732] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 909.766517] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.766761] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.766919] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.767101] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.767243] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b 
tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.767429] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.767655] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.767812] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.767984] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.768223] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.768399] env[62109]: DEBUG nova.virt.hardware [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.769544] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247ad8a6-ca1e-46f3-bd7b-ea0377e2b2cd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.777451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d68f77d-a67c-4b21-ad04-eaf20ed1c53b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.791208] env[62109]: ERROR nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
[ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] Traceback (most recent call last): [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] yield resources [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.driver.spawn(context, instance, image_meta, [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self._vmops.spawn(context, instance, image_meta, injected_files, [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] vm_ref = self.build_virtual_machine(instance, [ 909.791208] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] vif_infos = vmwarevif.get_vif_info(self._session, [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] for vif in network_info: [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return self._sync_wrapper(fn, *args, **kwargs) [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.wait() [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self[:] = self._gt.wait() [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return self._exit_event.wait() [ 909.791545] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 909.791545] env[62109]: ERROR 
nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] current.throw(*self._exc) [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] result = function(*args, **kwargs) [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return func(*args, **kwargs) [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise e [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] nwinfo = self.network_api.allocate_for_instance( [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] created_port_ids = self._update_ports_for_instance( [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] with excutils.save_and_reraise_exception(): [ 909.791853] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.force_reraise() [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise self.value [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] updated_port = self._update_port( [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] _ensure_no_port_binding_failure(port) [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise exception.PortBindingFailed(port_id=port['id']) [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. [ 909.792178] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] [ 909.792178] env[62109]: INFO nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Terminating instance [ 909.794864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.804807] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.805209] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 909.805400] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 909.805709] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2aec6fa4-0473-4d79-9197-51547903e0e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.813951] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5490349c-9acf-4f10-8d44-6713bffd7a64 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.838655] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87dff872-a469-465f-9c74-4524a2eab013 could not be found. 
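Editor's note: the WARNING above shows the teardown path tolerating a VM that is already gone from the backend: InstanceNotFound is caught, logged, and the destroy is treated as complete ("Instance destroyed" / "Took 0.03 seconds to destroy the instance on the hypervisor" in the records that follow). A rough sketch of that idempotent-destroy pattern is shown here; the backend and delete_vm names are hypothetical placeholders, not the real vmops API.

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        """Simplified stand-in for nova.exception.InstanceNotFound."""


    def destroy_instance(backend, instance_uuid):
        """Destroy a VM, treating an already-missing VM as success."""
        try:
            # 'backend' and 'delete_vm' are illustrative names for this sketch.
            backend.delete_vm(instance_uuid)
        except InstanceNotFound as exc:
            # Mirrors the "Instance does not exist on backend" warning: the VM
            # is already gone, so cleanup continues instead of failing.
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")


    if __name__ == '__main__':
        class _GoneBackend:
            def delete_vm(self, uuid):
                raise InstanceNotFound("Instance %s could not be found." % uuid)

        destroy_instance(_GoneBackend(), "87dff872-a469-465f-9c74-4524a2eab013")

The design point is that deleting something already absent is a success for cleanup purposes, which keeps the subsequent network deallocation and resource-claim abort on track.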
[ 909.838972] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 909.839186] env[62109]: INFO nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Took 0.03 seconds to destroy the instance on the hypervisor. [ 909.839447] env[62109]: DEBUG oslo.service.loopingcall [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.839684] env[62109]: DEBUG nova.compute.manager [-] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 909.839885] env[62109]: DEBUG nova.network.neutron [-] [instance: 87dff872-a469-465f-9c74-4524a2eab013] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 909.857814] env[62109]: DEBUG nova.network.neutron [-] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.858597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 58c5266b013a41999f55f1393babebe3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 909.865902] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58c5266b013a41999f55f1393babebe3 [ 910.077995] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.084206] env[62109]: DEBUG nova.network.neutron [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.174969] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.175668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 09657f6754024125a2513ddf80c5fe2c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.187702] env[62109]: DEBUG nova.network.neutron [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.187702] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] Expecting reply to msg ade2fd28f7e04ff7b8212f8205f6662e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.189954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09657f6754024125a2513ddf80c5fe2c [ 910.195914] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ade2fd28f7e04ff7b8212f8205f6662e [ 910.225333] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Releasing lock "refresh_cache-309a7bae-82f5-4b9e-ac86-e0f1803f2585" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.225775] env[62109]: DEBUG nova.compute.manager [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 910.226010] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 910.227170] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb0693f-5492-4a4e-8a2f-19187ce12c57 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.239455] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 910.239687] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-244adf8b-8f03-4136-8155-0e74256cbd45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.246245] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 910.246245] env[62109]: value = "task-401524" [ 910.246245] env[62109]: _type = "Task" [ 910.246245] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.257817] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401524, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.290292] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6674d87e-e469-44b9-a5fb-2af488eb67fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.297374] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26f6b02-a431-4446-8614-3990b1cfcd90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.328917] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230369ec-52ec-4df8-b6fd-e5c4eb9a5ce0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.336296] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc48b50-1c33-45a4-94a1-69b4effad3ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.349511] env[62109]: DEBUG nova.compute.provider_tree [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.350020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 7c869f22c85a4619a0417c6b2c1cbabd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.359175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c869f22c85a4619a0417c6b2c1cbabd [ 910.363810] env[62109]: DEBUG nova.network.neutron [-] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.364255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8fd84a39b79e4344a4c416322d86da93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.372284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fd84a39b79e4344a4c416322d86da93 [ 910.681173] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Releasing lock "refresh_cache-3f99ec88-f05f-4583-b08b-d40fb37e275e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.681421] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 910.681602] env[62109]: DEBUG nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 910.681764] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 910.689544] env[62109]: DEBUG oslo_concurrency.lockutils [req-d77ec638-6507-434f-987d-eb07c50faca2 req-7a98e8ee-72a4-4d49-9332-0f6ba0bea168 service nova] Releasing lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.689967] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.690147] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 910.690668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 6581794ababe47bd8802c606541cd01a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.697510] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6581794ababe47bd8802c606541cd01a [ 910.701953] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.702544] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 1a159d16a68449abb3493becd70b03f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.709553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a159d16a68449abb3493becd70b03f7 [ 910.755892] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401524, 'name': PowerOffVM_Task, 'duration_secs': 0.199123} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.756585] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 910.756889] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 910.757232] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a55b073a-78ca-4b56-ae84-306087e4b2c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.784214] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 910.784214] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 910.784214] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Deleting the datastore file [datastore1] 309a7bae-82f5-4b9e-ac86-e0f1803f2585 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.784214] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b60813b-3dc4-4155-8a52-34c5211a4666 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.792401] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for the task: (returnval){ [ 910.792401] env[62109]: value = "task-401526" [ 910.792401] env[62109]: _type = "Task" [ 910.792401] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.797241] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401526, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.853120] env[62109]: DEBUG nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 910.855654] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 945c21c11f2c42fb925dc6fd228e6559 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 910.866085] env[62109]: INFO nova.compute.manager [-] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Took 1.03 seconds to deallocate network for instance. [ 910.868745] env[62109]: DEBUG nova.compute.claims [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 910.868840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.872334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 945c21c11f2c42fb925dc6fd228e6559 [ 911.123623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquiring lock "228d2a6d-6c16-472c-9326-2e4576d9648c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.123876] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "228d2a6d-6c16-472c-9326-2e4576d9648c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.204975] env[62109]: DEBUG nova.network.neutron [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 911.205621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg dd96df242d7c4925ada52db112541a25 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.208880] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.214677] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd96df242d7c4925ada52db112541a25 [ 911.298201] env[62109]: DEBUG oslo_vmware.api [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Task: {'id': task-401526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107407} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.298459] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.298640] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 911.298814] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 911.299004] env[62109]: INFO nova.compute.manager [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Took 1.07 seconds to destroy the instance on the hypervisor. [ 911.299345] env[62109]: DEBUG oslo.service.loopingcall [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.299551] env[62109]: DEBUG nova.compute.manager [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 911.299645] env[62109]: DEBUG nova.network.neutron [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 911.325435] env[62109]: DEBUG nova.network.neutron [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.325990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 15d3fc29b6474098a3f43473021ecfc0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.332703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15d3fc29b6474098a3f43473021ecfc0 [ 911.333980] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.334449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg e8fb2b8c5ab04208b083d6161778d6da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.342148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8fb2b8c5ab04208b083d6161778d6da [ 911.363424] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.817s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.364079] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. 
[ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Traceback (most recent call last): [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.driver.spawn(context, instance, image_meta, [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] vm_ref = self.build_virtual_machine(instance, [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] vif_infos = vmwarevif.get_vif_info(self._session, [ 911.364079] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] for vif in network_info: [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self._sync_wrapper(fn, *args, **kwargs) [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.wait() [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self[:] = self._gt.wait() [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self._exit_event.wait() [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] result = hub.switch() [ 911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
911.364437] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return self.greenlet.switch() [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] result = function(*args, **kwargs) [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] return func(*args, **kwargs) [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise e [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] nwinfo = self.network_api.allocate_for_instance( [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] created_port_ids = self._update_ports_for_instance( [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] with excutils.save_and_reraise_exception(): [ 911.364798] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] self.force_reraise() [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise self.value [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] updated_port = self._update_port( [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] _ensure_no_port_binding_failure(port) [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] raise exception.PortBindingFailed(port_id=port['id']) [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] nova.exception.PortBindingFailed: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. [ 911.365167] env[62109]: ERROR nova.compute.manager [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] [ 911.365486] env[62109]: DEBUG nova.compute.utils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.365858] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.284s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.367778] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg ba79279531d544e382bf93b550c97968 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.369896] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Build of instance 1a38b70f-eabe-4b11-a371-cf971184211f was re-scheduled: Binding failed for port 2fbe7b17-bbb3-4849-8e6c-59f3ea6695f9, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 911.370400] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 911.370668] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.370856] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.371031] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.371459] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 7937487b725f48179c5672cc033dc25a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.377662] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7937487b725f48179c5672cc033dc25a [ 911.399665] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba79279531d544e382bf93b550c97968 [ 911.584243] env[62109]: DEBUG nova.compute.manager [req-7bf7cd18-660d-4869-9af9-e174532ff0f5 req-6ca4f011-5edf-4624-a512-bd02adb74296 service nova] [instance: bab79bb6-1638-4eee-812d-da1372134873] Received event network-vif-deleted-711f1545-3683-4e94-8275-4892d964343b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 911.707576] env[62109]: INFO nova.compute.manager [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] [instance: 3f99ec88-f05f-4583-b08b-d40fb37e275e] Took 1.03 seconds to deallocate network for instance. 
[ 911.709526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 8c8f617f489046728eca1dfc62314b7b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.746277] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c8f617f489046728eca1dfc62314b7b [ 911.828294] env[62109]: DEBUG nova.network.neutron [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.829289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7b2d4784b1a04feaad62967ee24f5739 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.836915] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.837257] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.837440] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 911.838068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b2d4784b1a04feaad62967ee24f5739 [ 911.838398] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b82ad2ae-899f-454d-a6c4-45e7792263a3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.847512] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290f35a7-7611-4dc5-9f58-657349424352 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.883504] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bab79bb6-1638-4eee-812d-da1372134873 could not be found. 
[ 911.883732] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 911.883911] env[62109]: INFO nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Took 0.05 seconds to destroy the instance on the hypervisor. [ 911.884221] env[62109]: DEBUG oslo.service.loopingcall [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.884664] env[62109]: DEBUG nova.compute.manager [-] [instance: bab79bb6-1638-4eee-812d-da1372134873] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 911.890619] env[62109]: DEBUG nova.network.neutron [-] [instance: bab79bb6-1638-4eee-812d-da1372134873] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 911.917109] env[62109]: DEBUG nova.network.neutron [-] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.917634] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 28f3007741af49029120d179e9a0569f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 911.925383] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.930320] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28f3007741af49029120d179e9a0569f [ 912.028304] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.028820] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 5be2f1fa2d3248c5847c30b64f6b83ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.038915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5be2f1fa2d3248c5847c30b64f6b83ec [ 912.077851] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cde1577-88ba-4d82-a611-1f04e07251fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.085363] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e92060d-5bc4-47c8-8b33-65e4c50d38d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.116100] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42062b25-4ad1-4859-a2f1-3ff0ee91f9ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.123039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacf339f-34f2-4803-8b1b-52ad81cbfcc8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.136031] env[62109]: DEBUG nova.compute.provider_tree [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.136542] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg bc20bb20d24d47a097975b8340ea0f3f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.143247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc20bb20d24d47a097975b8340ea0f3f [ 912.214543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 13d7d99b345147128085dcfea50befa8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.244725] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13d7d99b345147128085dcfea50befa8 [ 912.331148] env[62109]: INFO nova.compute.manager [-] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Took 1.03 seconds 
to deallocate network for instance. [ 912.335085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg dbf056f254ce4bcc89a25228ae2665c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.361890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbf056f254ce4bcc89a25228ae2665c4 [ 912.419549] env[62109]: DEBUG nova.network.neutron [-] [instance: bab79bb6-1638-4eee-812d-da1372134873] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.420027] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a1a1551b6f304d8aab3e36c63d31d920 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.428693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1a1551b6f304d8aab3e36c63d31d920 [ 912.530711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-1a38b70f-eabe-4b11-a371-cf971184211f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.530978] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 912.531163] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.531329] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.545622] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.546162] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 08ad990c61d04d48b93f35cb705e13ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.552240] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ad990c61d04d48b93f35cb705e13ba [ 912.639421] env[62109]: DEBUG nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.641940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 3325fc718b8d4bd29cc6cf384b511ace in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.659580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3325fc718b8d4bd29cc6cf384b511ace [ 912.735027] env[62109]: INFO nova.scheduler.client.report [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Deleted allocations for instance 3f99ec88-f05f-4583-b08b-d40fb37e275e [ 912.741101] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Expecting reply to msg 5114465a95294d1582b7a2a0872ea7b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 912.754599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5114465a95294d1582b7a2a0872ea7b6 [ 912.838230] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.922446] env[62109]: INFO nova.compute.manager [-] [instance: bab79bb6-1638-4eee-812d-da1372134873] Took 1.04 seconds to deallocate network for instance. 
[ 912.924807] env[62109]: DEBUG nova.compute.claims [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 912.924985] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.048263] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.048806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 88a78f7a03bf40f0a1bb5183af9d7736 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.057527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88a78f7a03bf40f0a1bb5183af9d7736 [ 913.144403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.778s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.145056] env[62109]: ERROR nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. 
[ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Traceback (most recent call last): [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.driver.spawn(context, instance, image_meta, [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] vm_ref = self.build_virtual_machine(instance, [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] vif_infos = vmwarevif.get_vif_info(self._session, [ 913.145056] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] for vif in network_info: [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return self._sync_wrapper(fn, *args, **kwargs) [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.wait() [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self[:] = self._gt.wait() [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return self._exit_event.wait() [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] current.throw(*self._exc) [ 913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
913.145349] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] result = function(*args, **kwargs) [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] return func(*args, **kwargs) [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise e [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] nwinfo = self.network_api.allocate_for_instance( [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] created_port_ids = self._update_ports_for_instance( [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] with excutils.save_and_reraise_exception(): [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] self.force_reraise() [ 913.145681] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise self.value [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] updated_port = self._update_port( [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] _ensure_no_port_binding_failure(port) [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] raise exception.PortBindingFailed(port_id=port['id']) [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] nova.exception.PortBindingFailed: Binding failed for 
port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. [ 913.146013] env[62109]: ERROR nova.compute.manager [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] [ 913.146013] env[62109]: DEBUG nova.compute.utils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 913.147002] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.296s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.147178] env[62109]: DEBUG nova.objects.instance [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 913.148849] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg cf0921d08b22496a891b0b72f8c56f2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.150761] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Build of instance 8a6d10c8-bd2b-40dd-9897-8f30223abe81 was re-scheduled: Binding failed for port 84f69f0f-65c8-4a94-811d-05ed8f7a266a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 913.151329] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 913.151555] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.151700] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.151857] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 913.152237] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 37a8109999584d5aa7bb5f2fe62528b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.160526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37a8109999584d5aa7bb5f2fe62528b6 [ 913.176334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf0921d08b22496a891b0b72f8c56f2a [ 913.243489] env[62109]: DEBUG oslo_concurrency.lockutils [None req-18395fe6-1457-4f1a-ae2e-d1b66ac44bfb tempest-AttachVolumeTestJSON-1360449787 tempest-AttachVolumeTestJSON-1360449787-project-member] Lock "3f99ec88-f05f-4583-b08b-d40fb37e275e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 102.561s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.244085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg f2a37c3a51df44efa48e5fba13852a93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.255342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2a37c3a51df44efa48e5fba13852a93 [ 913.551231] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 1a38b70f-eabe-4b11-a371-cf971184211f] Took 1.02 seconds to deallocate network for instance. 
[ 913.553179] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg f1997f1ab3b44a3e83c12123f6f13a60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.588431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1997f1ab3b44a3e83c12123f6f13a60 [ 913.653676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 0c73fb81509e4f65afd1327260baa99c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.660293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c73fb81509e4f65afd1327260baa99c [ 913.670774] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 913.746393] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 913.748312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg a6a462fbff8845d4a1f9d4ab1f16e523 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.757434] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.757918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 597deb11803b4103bef70adfbc362f46 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 913.767768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 597deb11803b4103bef70adfbc362f46 [ 913.796035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6a462fbff8845d4a1f9d4ab1f16e523 [ 914.057483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d4219a2c39b2476da38f289ed352c9aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.091831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4219a2c39b2476da38f289ed352c9aa [ 914.158632] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.159128] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d84d21fb-8570-4504-95b8-f44dca49ad2e tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 0234faa024934f91a5dd903472d97eb1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.159981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.395s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.162140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 8af8fd00edd74297b9f0bbb73f8d1e10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.170699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0234faa024934f91a5dd903472d97eb1 [ 914.201926] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8af8fd00edd74297b9f0bbb73f8d1e10 [ 914.260299] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-8a6d10c8-bd2b-40dd-9897-8f30223abe81" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.260601] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 914.260785] env[62109]: DEBUG nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 914.260952] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 914.272225] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.275709] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 914.276303] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 94a2037eb0d14374a81c12fc6385fcd1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.283612] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a2037eb0d14374a81c12fc6385fcd1 [ 914.582044] env[62109]: INFO nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Deleted allocations for instance 1a38b70f-eabe-4b11-a371-cf971184211f [ 914.588130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg b4b7081dbd614920a49a1c39cb9be245 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.612278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4b7081dbd614920a49a1c39cb9be245 [ 914.785424] env[62109]: DEBUG nova.network.neutron [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.785424] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9e4062c40e9448239c6933852b0265e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.787510] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 9e4062c40e9448239c6933852b0265e8 [ 914.826552] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e15808-12b6-44d6-acf6-5bd0f2d0898f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.834247] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcfb372-7c03-4cae-b626-2efe4b7087d0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.864740] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dff3f4f-b567-4e37-9f34-a858bdd614c8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.872328] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ab2985-34f7-4095-90f2-39cc7838866d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.886078] env[62109]: DEBUG nova.compute.provider_tree [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.888350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 3625222e5aff4fbf9a3b907c652f2e57 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 914.895189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3625222e5aff4fbf9a3b907c652f2e57 [ 915.096657] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "1a38b70f-eabe-4b11-a371-cf971184211f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 98.555s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.097278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 0c6a29236229463ebbc83ddd5915d525 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.110014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c6a29236229463ebbc83ddd5915d525 [ 915.282120] env[62109]: INFO nova.compute.manager [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 8a6d10c8-bd2b-40dd-9897-8f30223abe81] Took 1.02 seconds to deallocate network for instance. 
[ 915.283909] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 2a111ffcef274e8882432d38ce79dc60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.327904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a111ffcef274e8882432d38ce79dc60 [ 915.391750] env[62109]: DEBUG nova.scheduler.client.report [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.394925] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 606fcc0c93f4462cb7d35941d38420cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.408217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 606fcc0c93f4462cb7d35941d38420cd [ 915.599769] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.601552] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 02bec68089774f41bcaef42321066ec5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.642593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02bec68089774f41bcaef42321066ec5 [ 915.789773] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg a565553f8ff74d28b9598765cbc8a7fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.833724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a565553f8ff74d28b9598765cbc8a7fc [ 915.897563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.737s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.902284] env[62109]: ERROR nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. 
[ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Traceback (most recent call last): [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.driver.spawn(context, instance, image_meta, [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] vm_ref = self.build_virtual_machine(instance, [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] vif_infos = vmwarevif.get_vif_info(self._session, [ 915.902284] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] for vif in network_info: [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return self._sync_wrapper(fn, *args, **kwargs) [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.wait() [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self[:] = self._gt.wait() [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return self._exit_event.wait() [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] current.throw(*self._exc) [ 915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
915.902820] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] result = function(*args, **kwargs) [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] return func(*args, **kwargs) [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise e [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] nwinfo = self.network_api.allocate_for_instance( [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] created_port_ids = self._update_ports_for_instance( [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] with excutils.save_and_reraise_exception(): [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] self.force_reraise() [ 915.903164] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise self.value [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] updated_port = self._update_port( [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] _ensure_no_port_binding_failure(port) [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] raise exception.PortBindingFailed(port_id=port['id']) [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] nova.exception.PortBindingFailed: Binding failed for 
port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. [ 915.903540] env[62109]: ERROR nova.compute.manager [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] [ 915.903540] env[62109]: DEBUG nova.compute.utils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 915.905392] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Build of instance a276656a-67b0-4ceb-918f-cfb323ed09fd was re-scheduled: Binding failed for port e979a70d-ad81-4d49-a987-fcf30691d88c, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 915.907314] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 915.907550] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquiring lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.907700] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Acquired lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.907857] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 915.908396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 0377631174614a608760d102ae930d6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.909711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.926s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
915.911985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 7043f407efad423c99d27eb86304c748 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 915.924534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0377631174614a608760d102ae930d6d [ 915.987141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7043f407efad423c99d27eb86304c748 [ 916.124709] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.313784] env[62109]: INFO nova.scheduler.client.report [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Deleted allocations for instance 8a6d10c8-bd2b-40dd-9897-8f30223abe81 [ 916.319779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 44fc104a102743139698c64bc48b64a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 916.345199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44fc104a102743139698c64bc48b64a7 [ 916.426505] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 916.503775] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.504318] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 4de3d01cbf76425f9381bb08903c81f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 916.512934] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4de3d01cbf76425f9381bb08903c81f7 [ 916.573605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7912023-1936-4fc9-92f4-12c329f7753b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.581990] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40366345-4998-46ac-9728-1fc9c831a2bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.620544] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b0700-8f0d-46f5-98df-7612000bd397 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.628170] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafe6c5b-8417-4c56-8864-50359aeed7d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.641519] env[62109]: DEBUG nova.compute.provider_tree [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.642063] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg fc7f5154312547f0b5e2002e7fed10e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 916.649095] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc7f5154312547f0b5e2002e7fed10e7 [ 916.821735] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d31f8600-9054-4158-b0cc-126d1d749c80 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "8a6d10c8-bd2b-40dd-9897-8f30223abe81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.247s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.822543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 
tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 26739315f7004f1dbe4500efae09f1fa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 916.831763] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26739315f7004f1dbe4500efae09f1fa [ 917.007028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Releasing lock "refresh_cache-a276656a-67b0-4ceb-918f-cfb323ed09fd" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.007292] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 917.007477] env[62109]: DEBUG nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 917.007646] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 917.047716] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.048335] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 54cb75da4578499b84198165f31dc9b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.055134] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54cb75da4578499b84198165f31dc9b5 [ 917.144356] env[62109]: DEBUG nova.scheduler.client.report [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.146796] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 67975f8f4f5c4936a53f3db930d15ce9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.159663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67975f8f4f5c4936a53f3db930d15ce9 [ 917.325242] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 917.326823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg bba76eeed95543b59e83135c8ebf2f36 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.358063] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bba76eeed95543b59e83135c8ebf2f36 [ 917.550669] env[62109]: DEBUG nova.network.neutron [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.551025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg c6cb859b8d504c6dbf27dfb0ecfbb542 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.560873] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6cb859b8d504c6dbf27dfb0ecfbb542 [ 917.649505] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.740s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.650162] env[62109]: ERROR nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. 
[ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Traceback (most recent call last): [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.driver.spawn(context, instance, image_meta, [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] vm_ref = self.build_virtual_machine(instance, [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.650162] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] for vif in network_info: [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self._sync_wrapper(fn, *args, **kwargs) [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.wait() [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self[:] = self._gt.wait() [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self._exit_event.wait() [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] result = hub.switch() [ 917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
917.650492] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return self.greenlet.switch() [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] result = function(*args, **kwargs) [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] return func(*args, **kwargs) [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise e [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] nwinfo = self.network_api.allocate_for_instance( [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] created_port_ids = self._update_ports_for_instance( [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] with excutils.save_and_reraise_exception(): [ 917.650809] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] self.force_reraise() [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise self.value [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] updated_port = self._update_port( [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] _ensure_no_port_binding_failure(port) [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] raise exception.PortBindingFailed(port_id=port['id']) [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] nova.exception.PortBindingFailed: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. [ 917.651122] env[62109]: ERROR nova.compute.manager [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] [ 917.651392] env[62109]: DEBUG nova.compute.utils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 917.652095] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.437s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.652921] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ac0ca181ff14484ca2ac6c34c279737a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.653953] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Build of instance d851f6a6-07aa-4e64-a007-8a42a8ae9c42 was re-scheduled: Binding failed for port cd11637a-e069-4299-9bbb-23a2b33fa21c, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 917.654381] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 917.654592] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquiring lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.654738] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Acquired lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.654895] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.655457] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg fad2aef7fdf24dad99c28488209cfc6d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 917.663069] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fad2aef7fdf24dad99c28488209cfc6d [ 917.683385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac0ca181ff14484ca2ac6c34c279737a [ 917.848910] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.053835] env[62109]: INFO nova.compute.manager [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] [instance: a276656a-67b0-4ceb-918f-cfb323ed09fd] Took 1.05 seconds to deallocate network for instance. 
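The PortBindingFailed records above all repeat the same advice: check the Neutron logs for the named port. A minimal, illustrative Python sketch of one way to correlate those records follows; it only scans a log file shaped like this one. The file name "nova-compute.log" and the regular expressions are assumptions for illustration, not part of the captured run, and only standard-library modules are used.

# Sketch: pull PortBindingFailed events out of a nova-compute log like the
# one above so the port IDs can be cross-checked against the neutron logs.
# The path and regexes are illustrative assumptions, not from this run.
import re
from collections import defaultdict

PORT_RE = re.compile(
    r"PortBindingFailed: Binding failed for port "
    r"(?P<port>[0-9a-f-]{36})")
INSTANCE_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\]")

def port_binding_failures(path="nova-compute.log"):
    """Map each failed port ID to the instance UUIDs named on the same line."""
    failures = defaultdict(set)
    with open(path) as log:
        for line in log:
            port = PORT_RE.search(line)
            if not port:
                continue
            for inst in INSTANCE_RE.finditer(line):
                failures[port.group("port")].add(inst.group("uuid"))
    return failures

if __name__ == "__main__":
    for port, instances in sorted(port_binding_failures().items()):
        print(port, "->", ", ".join(sorted(instances)))

For the excerpt above this would associate port e979a70d-ad81-4d49-a987-fcf30691d88c with instance a276656a-67b0-4ceb-918f-cfb323ed09fd, which is the pair to look up on the Neutron side.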
[ 918.055718] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 173c95cd348d4787a5742828fd07ec5d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.098415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 173c95cd348d4787a5742828fd07ec5d [ 918.159406] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 5e08bf4114bb4b72b74dd8b8dd166ab5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.168332] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e08bf4114bb4b72b74dd8b8dd166ab5 [ 918.176118] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.279733] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.280314] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg e216257694c24846a1c61052c3e54da9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.289174] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e216257694c24846a1c61052c3e54da9 [ 918.563997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg c586b022a5e94bfab2ffc5bfeea6b9fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.601013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c586b022a5e94bfab2ffc5bfeea6b9fc [ 918.616606] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "a879b81d-fb5a-483b-9c2a-4a5c416c1caa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.616832] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "a879b81d-fb5a-483b-9c2a-4a5c416c1caa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.646904] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 
tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "9ab3a71f-7e26-4d29-b006-6dbebcee16e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.647306] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "9ab3a71f-7e26-4d29-b006-6dbebcee16e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.683348] env[62109]: WARNING nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 309a7bae-82f5-4b9e-ac86-e0f1803f2585 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 918.683945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 4401e67058f64b2da63efbadcea72c09 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.694326] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4401e67058f64b2da63efbadcea72c09 [ 918.782447] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Releasing lock "refresh_cache-d851f6a6-07aa-4e64-a007-8a42a8ae9c42" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.782701] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 918.782878] env[62109]: DEBUG nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.783044] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 918.797253] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.797845] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 46a6f096fb864de5966c7c7398c6111d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 918.804637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46a6f096fb864de5966c7c7398c6111d [ 919.088937] env[62109]: INFO nova.scheduler.client.report [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Deleted allocations for instance a276656a-67b0-4ceb-918f-cfb323ed09fd [ 919.097267] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Expecting reply to msg 457bf16f028244269da34501c52e57ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.112499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 457bf16f028244269da34501c52e57ce [ 919.186481] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a276656a-67b0-4ceb-918f-cfb323ed09fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 919.187048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 30faced649814e48880e28eb3f34dbe5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.200688] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30faced649814e48880e28eb3f34dbe5 [ 919.299818] env[62109]: DEBUG nova.network.neutron [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.300877] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 8fc777dd409a4cb7be38b9f90c7a4f15 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.309339] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fc777dd409a4cb7be38b9f90c7a4f15 [ 919.599387] env[62109]: DEBUG oslo_concurrency.lockutils [None req-33f21230-5d02-4b63-b679-8725564d012a tempest-ServersNegativeTestMultiTenantJSON-803762167 tempest-ServersNegativeTestMultiTenantJSON-803762167-project-member] Lock "a276656a-67b0-4ceb-918f-cfb323ed09fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.360s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.600011] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 150169e6f916406fbeea3860c4424ddc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.617640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 150169e6f916406fbeea3860c4424ddc [ 919.688847] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "b04cc451-a497-474f-90dd-282a469ff3c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.689092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "b04cc451-a497-474f-90dd-282a469ff3c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.689633] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance d851f6a6-07aa-4e64-a007-8a42a8ae9c42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 919.689774] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 55da10ab-e116-4ead-90ff-c82fffb2dcc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 919.689895] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 87dff872-a469-465f-9c74-4524a2eab013 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 919.690009] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance bab79bb6-1638-4eee-812d-da1372134873 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 919.690498] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 877585082b744f86b472d560350930dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.700181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 877585082b744f86b472d560350930dc [ 919.802861] env[62109]: INFO nova.compute.manager [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] [instance: d851f6a6-07aa-4e64-a007-8a42a8ae9c42] Took 1.02 seconds to deallocate network for instance. [ 919.804597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 66a64709d59a4cb593d56c61b74f7709 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 919.836130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66a64709d59a4cb593d56c61b74f7709 [ 920.106133] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 920.108020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 372912ddabf64e7ca8cd1552a56f5514 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 920.141895] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 372912ddabf64e7ca8cd1552a56f5514 [ 920.193159] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 5f58014c-e132-4fad-9ba7-bc183318200f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.193774] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 6700d6d639ff46e3be94b41b7d1f94d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 920.207168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6700d6d639ff46e3be94b41b7d1f94d4 [ 920.309728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 3afcd2c1323242fc8b7b5d7041e7e09f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 920.343868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3afcd2c1323242fc8b7b5d7041e7e09f [ 920.632851] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.695995] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.696663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 2bd84af0a2574011b0b0464030edbc31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 920.713081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bd84af0a2574011b0b0464030edbc31 [ 920.837281] env[62109]: INFO nova.scheduler.client.report [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Deleted allocations for instance d851f6a6-07aa-4e64-a007-8a42a8ae9c42 [ 920.846213] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Expecting reply to msg 960686c26a404156a302a40d0d170bc0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 920.862857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 960686c26a404156a302a40d0d170bc0 [ 921.199024] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 8bd1a8aa-844b-47ca-9296-0c30af695984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 921.199690] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 140179ff3ae94d04932e70fa974d0ee4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 921.210896] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 140179ff3ae94d04932e70fa974d0ee4 [ 921.346248] env[62109]: DEBUG oslo_concurrency.lockutils [None req-08ae84bb-a61c-483a-822d-5b6111e21965 tempest-ServersTestMultiNic-293111283 tempest-ServersTestMultiNic-293111283-project-member] Lock "d851f6a6-07aa-4e64-a007-8a42a8ae9c42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.367s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.346834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 4ad7e1b826244f1884352e3c7230d5cb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 921.361021] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ad7e1b826244f1884352e3c7230d5cb [ 921.702262] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 27fed863-1e27-4258-8b43-b8cd23e3c1c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 921.702826] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg ea757cdb8db848209e4c78f46761af41 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 921.713900] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea757cdb8db848209e4c78f46761af41 [ 921.849430] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 921.851129] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 1fda3007b01147729960d209d05c1d51 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 921.888082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fda3007b01147729960d209d05c1d51 [ 922.205469] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 228d2a6d-6c16-472c-9326-2e4576d9648c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.206079] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg bc2e4871722e420e97e0d3d85a939667 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 922.216126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc2e4871722e420e97e0d3d85a939667 [ 922.375199] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.709594] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance a879b81d-fb5a-483b-9c2a-4a5c416c1caa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.710045] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 3eddb8a06cc5423db1762fa957bdff89 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 922.720910] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3eddb8a06cc5423db1762fa957bdff89 [ 923.213139] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 9ab3a71f-7e26-4d29-b006-6dbebcee16e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 923.213310] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 923.213456] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 923.383411] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1824f39d-a983-4bf6-8bcd-f7242faed486 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.391399] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e0fc8a-fea2-4d37-b84e-ad19a267b0f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.422078] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564f2f13-c467-4eeb-acd7-cf06e469a7f6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.430780] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82f9953-b3de-46f3-aaca-a1dc750c9972 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.443291] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.443760] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 280ddd79dd9347e1a260e2133b166680 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 923.450546] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 280ddd79dd9347e1a260e2133b166680 [ 923.946099] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.948626] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 1c1454e707ab496fa5ce74a50788c647 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 923.961172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c1454e707ab496fa5ce74a50788c647 [ 924.451092] 
env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 924.451363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.799s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.451640] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.035s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.453633] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 0eede65e15a74d048381eb8e095d181a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 924.507624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0eede65e15a74d048381eb8e095d181a [ 925.139680] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8349ba-5eae-4e9e-af3e-f3a0b199e4e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.146857] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c04e6b-4776-4bdc-b998-f24afe331fa9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.199362] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d032ea-158c-4b84-8973-db77cae9d67c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.207304] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ad8eea-07d2-4685-a595-d8937bc1bbbd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.222670] env[62109]: DEBUG nova.compute.provider_tree [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.223201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6b7513996009480b814efcda9772e775 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 925.231743] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b7513996009480b814efcda9772e775 [ 925.728989] env[62109]: DEBUG nova.scheduler.client.report [None 
req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.728989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 6e8e5c9b78a749b6aa770ee0999c8862 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 925.742125] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e8e5c9b78a749b6aa770ee0999c8862 [ 926.231494] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.780s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.232206] env[62109]: ERROR nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. 
[ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Traceback (most recent call last): [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.driver.spawn(context, instance, image_meta, [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] vm_ref = self.build_virtual_machine(instance, [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] vif_infos = vmwarevif.get_vif_info(self._session, [ 926.232206] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] for vif in network_info: [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return self._sync_wrapper(fn, *args, **kwargs) [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.wait() [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self[:] = self._gt.wait() [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return self._exit_event.wait() [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] current.throw(*self._exc) [ 926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
926.232634] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] result = function(*args, **kwargs) [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] return func(*args, **kwargs) [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise e [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] nwinfo = self.network_api.allocate_for_instance( [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] created_port_ids = self._update_ports_for_instance( [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] with excutils.save_and_reraise_exception(): [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] self.force_reraise() [ 926.233086] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise self.value [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] updated_port = self._update_port( [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] _ensure_no_port_binding_failure(port) [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] raise exception.PortBindingFailed(port_id=port['id']) [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] nova.exception.PortBindingFailed: Binding failed for 
port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. [ 926.233606] env[62109]: ERROR nova.compute.manager [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] [ 926.233606] env[62109]: DEBUG nova.compute.utils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 926.234217] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.186s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.236904] env[62109]: INFO nova.compute.claims [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.239150] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 357e8e2ed6c44ab99720792da7fd2ca6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 926.240594] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Build of instance 55da10ab-e116-4ead-90ff-c82fffb2dcc6 was re-scheduled: Binding failed for port 792b62d9-f0ba-485b-9131-66603f84de93, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 926.241081] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 926.241315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.241461] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.241663] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 926.242053] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg a99474683fbc44d4a236409ff977c163 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 926.249553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a99474683fbc44d4a236409ff977c163 [ 926.298066] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 357e8e2ed6c44ab99720792da7fd2ca6 [ 926.744925] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 18fdc2e3d4364865ae2766f1a824bc08 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 926.754684] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18fdc2e3d4364865ae2766f1a824bc08 [ 926.765323] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.849122] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.849653] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 9d0934b50b9b4d14aac5befe240ff932 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 926.860079] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d0934b50b9b4d14aac5befe240ff932 [ 927.351764] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-55da10ab-e116-4ead-90ff-c82fffb2dcc6" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.352068] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 927.352203] env[62109]: DEBUG nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 927.352369] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 927.396293] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.396293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 2dede8823e9c415d9fd919769c20a9db in queue reply_7522b64acfeb4981b1f36928b040d568 [ 927.400120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dede8823e9c415d9fd919769c20a9db [ 927.430360] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04561a9b-9056-428a-8081-9e27a7757613 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.438316] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9da0a22-d3c9-4b14-b670-6cd25c12faaa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.478605] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfd6de9-eef5-4b17-be72-5a0d83687fda {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.487394] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb8360e-b92b-4ecc-b8d5-16b45579c1ab {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.500424] env[62109]: DEBUG nova.compute.provider_tree [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.500965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 6c333e7a0e564a9493716258fb0ded18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 927.510151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c333e7a0e564a9493716258fb0ded18 [ 927.895116] env[62109]: DEBUG nova.network.neutron [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.895852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg cc3e9f0d9cfc412ab040888ca7466b69 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 927.903954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc3e9f0d9cfc412ab040888ca7466b69 [ 928.005575] env[62109]: DEBUG nova.scheduler.client.report [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Inventory has not changed for provider 
5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.007982] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 2387db2f17fa4bd1bf094e4ab37069d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 928.019446] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2387db2f17fa4bd1bf094e4ab37069d4 [ 928.398003] env[62109]: INFO nova.compute.manager [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: 55da10ab-e116-4ead-90ff-c82fffb2dcc6] Took 1.05 seconds to deallocate network for instance. [ 928.399701] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5ba6b63b1abe420086c20090f3772ea8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 928.441607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba6b63b1abe420086c20090f3772ea8 [ 928.510972] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.511604] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.513310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 16c1e04463244e47b191bac8547b5733 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 928.526315] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.662s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.526315] env[62109]: INFO nova.compute.claims [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.526315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 2da20e0265e54bbead4125703c851d17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 928.555667] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2da20e0265e54bbead4125703c851d17 [ 928.561068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16c1e04463244e47b191bac8547b5733 [ 928.904289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg bbbdb5eff9f543cdbeec50dc4d839b0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 928.935679] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbbdb5eff9f543cdbeec50dc4d839b0b [ 929.016889] env[62109]: DEBUG nova.compute.utils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.017545] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 7bf8635effe84a50897ae4d28ffa331e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.018552] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 929.018712] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.021850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 7ffaceda340e4b579a1128414f616c39 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.029784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ffaceda340e4b579a1128414f616c39 [ 929.038693] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bf8635effe84a50897ae4d28ffa331e [ 929.066852] env[62109]: DEBUG nova.policy [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '558eac5e9e0e46499aed7185a9b33af7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e35f4d42ead34229b32278931e92dc0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 929.353701] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Successfully created port: 66844096-b3f6-4317-9dfc-3d1d1cd65bd5 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.428510] env[62109]: INFO nova.scheduler.client.report [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Deleted allocations for instance 55da10ab-e116-4ead-90ff-c82fffb2dcc6 [ 929.434866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 70aa611946d747aea016a40a36ddd79c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.454419] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70aa611946d747aea016a40a36ddd79c [ 929.524481] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 929.526141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg d1da81f10f48456f8de87bea971b534e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.579883] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1da81f10f48456f8de87bea971b534e [ 929.670257] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e4fdce-a4ab-43d7-9b72-044db50b357d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.678297] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c158bb87-912a-4242-95d1-926460ef48ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.710012] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1bbd4a-7c92-4092-b3a5-ffb1108bebe5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.719132] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01624f7-53b3-48ad-aed4-dbf744340ae3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.733750] env[62109]: DEBUG nova.compute.provider_tree [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.734307] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg c4e5cd8a3edc4b808ca5038983c15949 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.741585] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4e5cd8a3edc4b808ca5038983c15949 [ 929.940469] env[62109]: DEBUG oslo_concurrency.lockutils [None req-6e797b24-cf71-4b5c-9170-8bd5c31743d2 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "55da10ab-e116-4ead-90ff-c82fffb2dcc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.989s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.941224] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg ea5db94792324bea852f11543067bc14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 929.950012] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea5db94792324bea852f11543067bc14 [ 930.030622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 
tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 5f9326f1685f40b3bac6b0a4bbc92b81 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.049670] env[62109]: DEBUG nova.compute.manager [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Received event network-changed-66844096-b3f6-4317-9dfc-3d1d1cd65bd5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 930.049670] env[62109]: DEBUG nova.compute.manager [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Refreshing instance network info cache due to event network-changed-66844096-b3f6-4317-9dfc-3d1d1cd65bd5. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 930.049670] env[62109]: DEBUG oslo_concurrency.lockutils [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] Acquiring lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.049670] env[62109]: DEBUG oslo_concurrency.lockutils [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] Acquired lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.049670] env[62109]: DEBUG nova.network.neutron [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Refreshing network info cache for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 930.050394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] Expecting reply to msg 06e3409ea4834c90bd5ebd3d3926a7f4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.056501] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06e3409ea4834c90bd5ebd3d3926a7f4 [ 930.069060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f9326f1685f40b3bac6b0a4bbc92b81 [ 930.237450] env[62109]: DEBUG nova.scheduler.client.report [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.240576] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 91ad996638044ee5b11946ef255d64ee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.257344] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91ad996638044ee5b11946ef255d64ee [ 930.291870] env[62109]: ERROR nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. [ 930.291870] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.291870] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 930.291870] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 930.291870] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.291870] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.291870] env[62109]: ERROR nova.compute.manager raise self.value [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 930.291870] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 930.291870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.291870] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 930.292389] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 930.292389] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 930.292389] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. 
[ 930.292389] env[62109]: ERROR nova.compute.manager [ 930.292389] env[62109]: Traceback (most recent call last): [ 930.292389] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 930.292389] env[62109]: listener.cb(fileno) [ 930.292389] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 930.292389] env[62109]: result = function(*args, **kwargs) [ 930.292389] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 930.292389] env[62109]: return func(*args, **kwargs) [ 930.292389] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.292389] env[62109]: raise e [ 930.292389] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.292389] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 930.292389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 930.292389] env[62109]: created_port_ids = self._update_ports_for_instance( [ 930.292389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 930.292389] env[62109]: with excutils.save_and_reraise_exception(): [ 930.292389] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.292389] env[62109]: self.force_reraise() [ 930.292389] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.292389] env[62109]: raise self.value [ 930.292389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 930.292389] env[62109]: updated_port = self._update_port( [ 930.292389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.292389] env[62109]: _ensure_no_port_binding_failure(port) [ 930.292389] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 930.292389] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 930.293110] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. [ 930.293110] env[62109]: Removing descriptor: 19 [ 930.443527] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 930.445566] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 5f7f02961d974a19ba4454343756677d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.484535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f7f02961d974a19ba4454343756677d [ 930.534151] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 930.562004] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.562263] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.562385] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.562564] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.562706] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.562847] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.563398] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.563573] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.563749] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b 
tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.563910] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.564172] env[62109]: DEBUG nova.virt.hardware [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.565017] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22481894-6f79-4367-859f-035863b1ca24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.574039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a4d266-ce18-4c52-acdb-6b53f48aa33d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.589804] env[62109]: ERROR nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. 
[ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Traceback (most recent call last): [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] yield resources [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.driver.spawn(context, instance, image_meta, [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] vm_ref = self.build_virtual_machine(instance, [ 930.589804] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] vif_infos = vmwarevif.get_vif_info(self._session, [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] for vif in network_info: [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return self._sync_wrapper(fn, *args, **kwargs) [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.wait() [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self[:] = self._gt.wait() [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return self._exit_event.wait() [ 930.590317] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 930.590317] env[62109]: ERROR 
nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] current.throw(*self._exc) [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] result = function(*args, **kwargs) [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return func(*args, **kwargs) [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise e [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] nwinfo = self.network_api.allocate_for_instance( [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] created_port_ids = self._update_ports_for_instance( [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] with excutils.save_and_reraise_exception(): [ 930.590654] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.force_reraise() [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise self.value [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] updated_port = self._update_port( [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] _ensure_no_port_binding_failure(port) [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise exception.PortBindingFailed(port_id=port['id']) [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. [ 930.590976] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] [ 930.590976] env[62109]: INFO nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Terminating instance [ 930.592532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquiring lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.716251] env[62109]: DEBUG nova.network.neutron [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.743247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.743752] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 930.745492] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 7a6dc5775cf244e782e49067aa6ac1cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.746610] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.828s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.746721] env[62109]: DEBUG nova.objects.instance [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] [instance: 309a7bae-82f5-4b9e-ac86-e0f1803f2585] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 930.748141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg 7d2d77f35eb8452d80a079b19051e2ab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.778905] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d2d77f35eb8452d80a079b19051e2ab [ 930.779532] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a6dc5775cf244e782e49067aa6ac1cd [ 930.820806] env[62109]: DEBUG nova.network.neutron [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.821504] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] Expecting reply to msg 3078a92e25984161873e787abb723305 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 930.830295] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3078a92e25984161873e787abb723305 [ 930.963072] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.253040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg 2b649eede8884bd5a9ba070100402783 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.254991] env[62109]: DEBUG nova.compute.utils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 931.255572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 2d3514e6a81e4f19b3dc46a81555e5f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.256605] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Not allocating networking since 'none' was specified. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 931.261391] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b649eede8884bd5a9ba070100402783 [ 931.265122] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d3514e6a81e4f19b3dc46a81555e5f0 [ 931.323695] env[62109]: DEBUG oslo_concurrency.lockutils [req-7c47f584-b320-4767-9082-d0e0227f4dbd req-c0f3dddf-036f-42da-bd67-56e52e238ded service nova] Releasing lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.324138] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquired lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.324335] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.324770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg b49047814e9a4c85b49ae806620f5abd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.331229] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b49047814e9a4c85b49ae806620f5abd [ 931.437583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "d622ca94-7f5c-47f4-8077-ff37f64eea02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.438496] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "d622ca94-7f5c-47f4-8077-ff37f64eea02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.758656] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.759059] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4d7c83a5-f13b-4e04-aab7-5644b6fd69f5 tempest-ServersAdmin275Test-2021021546 tempest-ServersAdmin275Test-2021021546-project-admin] Expecting reply to msg c737dcee49814bee927295760d50aaa4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.760204] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 931.761928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg b33a5be6ba2f40839db5a98c9a6710c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.763428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.895s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.765195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 733451c245574102be48e9e958635406 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.780827] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c737dcee49814bee927295760d50aaa4 [ 931.791438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b33a5be6ba2f40839db5a98c9a6710c0 [ 931.794831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 733451c245574102be48e9e958635406 [ 931.842066] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.925867] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.926474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 8f35834f29964f3d9fd9a4bacce47d22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 931.938997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f35834f29964f3d9fd9a4bacce47d22 [ 932.076494] env[62109]: DEBUG nova.compute.manager [req-71894d4b-54d7-46cd-b66f-ca24aeb01065 req-6b664f33-4961-467d-9147-05b43245dea6 service nova] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Received event network-vif-deleted-66844096-b3f6-4317-9dfc-3d1d1cd65bd5 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 932.271930] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg a5eaeeec5a0d42958b843693e549dd5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 932.309678] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5eaeeec5a0d42958b843693e549dd5c [ 932.418043] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a967ed-ce82-4450-9966-06ace04495c4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.426549] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1ff3f0-ff9c-4dd5-9c2d-845a03eed2bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.429821] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Releasing lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.430209] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 932.430397] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 932.430642] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81cf3a92-ebef-4bcd-8cdc-a0be1c8c6a53 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.461361] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ab9569-cb8a-4fe3-ab53-d0de17b94c61 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.466192] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03901965-c910-4d3f-a577-e195865eeacf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.480791] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe025c7-8d75-40d0-90dc-4f70f378cedc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.487653] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f58014c-e132-4fad-9ba7-bc183318200f could not be found. [ 932.487865] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 932.488081] env[62109]: INFO nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Took 0.06 seconds to destroy the instance on the hypervisor. [ 932.488323] env[62109]: DEBUG oslo.service.loopingcall [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.488889] env[62109]: DEBUG nova.compute.manager [-] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 932.488992] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 932.498081] env[62109]: DEBUG nova.compute.provider_tree [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.498790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 90fdef75a8e4426db0cfd74281d6bc2b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 932.505897] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90fdef75a8e4426db0cfd74281d6bc2b [ 932.511585] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 932.512059] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c46d2eddb1334198ac34c13dfd51c1b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 932.518176] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c46d2eddb1334198ac34c13dfd51c1b6 [ 932.775253] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 932.797778] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.798056] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.798219] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.798399] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.798540] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.798681] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.798878] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.799066] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.799247] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a 
tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.799408] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.799576] env[62109]: DEBUG nova.virt.hardware [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.800427] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40fe2eb-e35c-4ad6-9068-07b7f75dc10c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.807519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f873d987-bc16-4ebd-abf2-ce10d3d1e0ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.820075] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.825367] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Creating folder: Project (393d372817d746aa82df4745529f66f3). Parent ref: group-v108864. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.825619] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39725828-df84-4956-b5d5-65b7b3733637 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.834339] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Created folder: Project (393d372817d746aa82df4745529f66f3) in parent group-v108864. [ 932.834511] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Creating folder: Instances. Parent ref: group-v108888. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.834708] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ff4f379-d69b-482b-9247-f001e687cfd9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.843068] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Created folder: Instances in parent group-v108888. 
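
The nova.virt.hardware DEBUG records above trace how a CPU topology is chosen for the m1.nano flavor: flavor and image limits and preferences are all 0 (unconstrained), the effective maxima default to 65536 per dimension, and for a single vCPU the only possible topology is sockets=1, cores=1, threads=1. The sketch below is a minimal, self-contained illustration of that enumeration step, not Nova's implementation; the helper names and defaults are assumptions chosen for clarity.

```python
# Illustrative sketch of the topology selection logged by nova.virt.hardware
# above: enumerate every (sockets, cores, threads) split of the flavor's vCPU
# count that respects the per-dimension maxima, then order by preference.
# Names and defaults are assumptions, not Nova's actual code.
from typing import List, NamedTuple


class CPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[CPUTopology]:
    """Return all factorisations of vcpus into sockets * cores * threads."""
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(CPUTopology(sockets, cores, threads))
    return found


def sort_by_preference(topologies: List[CPUTopology],
                       prefer: CPUTopology = CPUTopology(0, 0, 0)) -> List[CPUTopology]:
    """Topologies matching a non-zero preferred value sort first; 0 = no preference."""
    def mismatch(t: CPUTopology):
        return (bool(prefer.sockets and t.sockets != prefer.sockets),
                bool(prefer.cores and t.cores != prefer.cores),
                bool(prefer.threads and t.threads != prefer.threads))
    return sorted(topologies, key=mismatch)


# The m1.nano flavor above has 1 vCPU and no constraints, so the only
# candidate is 1:1:1, matching "Possible topologies [VirtCPUTopology(...)]".
print(sort_by_preference(possible_topologies(1)))
# [CPUTopology(sockets=1, cores=1, threads=1)]
```
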
[ 932.843253] env[62109]: DEBUG oslo.service.loopingcall [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.843426] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 932.843600] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f19d9a3-e44b-491e-b77e-7759fdc85913 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.858825] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.858825] env[62109]: value = "task-401533" [ 932.858825] env[62109]: _type = "Task" [ 932.858825] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.865475] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401533, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.001852] env[62109]: DEBUG nova.scheduler.client.report [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.004704] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 01b424bba6314d7b89feab2ae2fbe9de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 933.013720] env[62109]: DEBUG nova.network.neutron [-] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.014344] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5a3696147d0f45a58e3cab5ba8424c31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 933.018837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b424bba6314d7b89feab2ae2fbe9de [ 933.023206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3696147d0f45a58e3cab5ba8424c31 [ 933.369608] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401533, 'name': CreateVM_Task, 'duration_secs': 0.312417} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.369756] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 933.370244] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.370446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.370824] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 933.371108] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-114028ac-0a47-4bf0-b604-8f159dfdea44 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.375544] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 933.375544] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]521560d0-fdeb-f534-29bf-36e2c7cdf090" [ 933.375544] env[62109]: _type = "Task" [ 933.375544] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.383514] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]521560d0-fdeb-f534-29bf-36e2c7cdf090, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.507583] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.744s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.508573] env[62109]: ERROR nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] Traceback (most recent call last): [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.driver.spawn(context, instance, image_meta, [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self._vmops.spawn(context, instance, image_meta, injected_files, [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] vm_ref = self.build_virtual_machine(instance, [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] vif_infos = vmwarevif.get_vif_info(self._session, [ 933.508573] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] for vif in network_info: [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self._sync_wrapper(fn, *args, **kwargs) [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.wait() [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 933.508982] 
env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self[:] = self._gt.wait() [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self._exit_event.wait() [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] result = hub.switch() [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 933.508982] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return self.greenlet.switch() [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] result = function(*args, **kwargs) [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] return func(*args, **kwargs) [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise e [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] nwinfo = self.network_api.allocate_for_instance( [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] created_port_ids = self._update_ports_for_instance( [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] with excutils.save_and_reraise_exception(): [ 933.509325] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] self.force_reraise() [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise self.value [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] updated_port = self._update_port( [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] _ensure_no_port_binding_failure(port) [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] raise exception.PortBindingFailed(port_id=port['id']) [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] nova.exception.PortBindingFailed: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. [ 933.509755] env[62109]: ERROR nova.compute.manager [instance: 87dff872-a469-465f-9c74-4524a2eab013] [ 933.510285] env[62109]: DEBUG nova.compute.utils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 933.511580] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.674s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.511778] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.513962] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.589s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.515914] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ca0e46a1cc42437dbc05e1475ec3c76c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 933.517172] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Build of instance 87dff872-a469-465f-9c74-4524a2eab013 was re-scheduled: Binding failed for port c42dbb9c-5fa1-493e-8235-912a6dd1e291, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 933.517618] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 933.517840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquiring lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.518056] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Acquired lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.518227] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 933.518662] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 57e75c5c1d764da587127025255209b1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 933.519766] env[62109]: INFO nova.compute.manager [-] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Took 1.03 seconds to deallocate network for instance. 
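
Both failed boots in this section (port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5 for instance 5f58014c-... and port c42dbb9c-5fa1-493e-8235-912a6dd1e291 for instance 87dff872-...) die at the same point in the traceback: `_update_port` calls `_ensure_no_port_binding_failure(port)`, which raises `nova.exception.PortBindingFailed`; the compute manager then deallocates networking, aborts the resource claim, and either terminates or re-schedules the build. The snippet below is a minimal sketch of that check, assuming the failure is signalled through the port's `binding:vif_type` attribute (the standard Neutron port-binding extension); it is illustrative rather than a copy of Nova's code.

```python
# Minimal sketch of the port-binding check seen in the tracebacks above.
# Assumption: a failed binding is reported by Neutron as
# binding:vif_type == 'binding_failed' (the portbindings extension).
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id: str):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron could not bind the port to any host/segment."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


# The port from the first traceback would trip the check; in the compute
# manager this propagates out of _allocate_network_async, after which the
# resource claim is aborted and the build is cleaned up or re-scheduled,
# as logged in this section.
try:
    ensure_no_port_binding_failure({
        'id': '66844096-b3f6-4317-9dfc-3d1d1cd65bd5',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)
```
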
[ 933.525088] env[62109]: DEBUG nova.compute.claims [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 933.525210] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.525674] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57e75c5c1d764da587127025255209b1 [ 933.534568] env[62109]: INFO nova.scheduler.client.report [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Deleted allocations for instance 309a7bae-82f5-4b9e-ac86-e0f1803f2585 [ 933.537415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg d58d641db8634d589a1acd9174b947bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 933.558119] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca0e46a1cc42437dbc05e1475ec3c76c [ 933.579523] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d58d641db8634d589a1acd9174b947bf [ 933.886094] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]521560d0-fdeb-f534-29bf-36e2c7cdf090, 'name': SearchDatastore_Task, 'duration_secs': 0.024325} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.886452] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.886613] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.886852] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.886999] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.887173] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.887427] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c9da254-014f-4104-a028-1acebf20e6d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.898042] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.898196] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 933.898882] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41865200-18d1-4878-a762-3bfb054470d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.903375] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 933.903375] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52cd5f55-9152-d942-2069-a5cadd4d7c53" [ 933.903375] env[62109]: _type = "Task" [ 933.903375] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.911140] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52cd5f55-9152-d942-2069-a5cadd4d7c53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.044593] env[62109]: DEBUG oslo_concurrency.lockutils [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Lock "309a7bae-82f5-4b9e-ac86-e0f1803f2585" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.030s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.044969] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-61ea93d3-39e5-4feb-a008-68e655cff277 tempest-ServersAdmin275Test-1979464108 tempest-ServersAdmin275Test-1979464108-project-member] Expecting reply to msg 251d0b7a2e6045398571dfbb58b0e0c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 934.047193] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 934.057891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 251d0b7a2e6045398571dfbb58b0e0c9 [ 934.154374] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.155484] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg a69aa6f639e044a7b23f2f2e18e5cb78 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 934.165099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a69aa6f639e044a7b23f2f2e18e5cb78 [ 934.175382] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df12cb61-41f3-48c2-948f-7e3ca4317ba1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.183232] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57c4922-2572-4f3d-91fe-84fc78217727 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.214713] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f8b39b-44f3-460a-95a1-718e1bc39c8a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.222530] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6900a73-c424-4079-bf75-37e786b071ef {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.236917] env[62109]: DEBUG nova.compute.provider_tree [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.237709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 609d4c70c9434186be7d235c63bd857b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 934.245110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 609d4c70c9434186be7d235c63bd857b [ 934.416663] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52cd5f55-9152-d942-2069-a5cadd4d7c53, 'name': SearchDatastore_Task, 'duration_secs': 0.007711} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.418655] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26157b74-79ff-46e6-95f9-24b333cf11f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.426654] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 934.426654] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52c8365e-f569-42d1-ad74-7aa9bf81c303" [ 934.426654] env[62109]: _type = "Task" [ 934.426654] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.435581] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52c8365e-f569-42d1-ad74-7aa9bf81c303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.656995] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Releasing lock "refresh_cache-87dff872-a469-465f-9c74-4524a2eab013" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.657208] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 934.657393] env[62109]: DEBUG nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 934.657559] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 934.673544] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 934.674117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg e2c7d3925fb94cbbb22a278077431b10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 934.681691] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2c7d3925fb94cbbb22a278077431b10 [ 934.741141] env[62109]: DEBUG nova.scheduler.client.report [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.743822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b5f3f4af25e84bb28b17fa5e592e46e0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 934.755738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5f3f4af25e84bb28b17fa5e592e46e0 [ 934.937023] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52c8365e-f569-42d1-ad74-7aa9bf81c303, 'name': SearchDatastore_Task, 'duration_secs': 0.009165} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.937332] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.937475] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 934.937723] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12d3cd3f-60ac-4713-97f6-1a92d89c8c73 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.944488] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 934.944488] env[62109]: value = "task-401535" [ 934.944488] env[62109]: _type = "Task" [ 934.944488] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.952208] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401535, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.176108] env[62109]: DEBUG nova.network.neutron [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.176715] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 45da472a93d146b7a6b4d8cce224083c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.186297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45da472a93d146b7a6b4d8cce224083c [ 935.246619] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.733s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.247289] env[62109]: ERROR nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
[ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] Traceback (most recent call last): [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.driver.spawn(context, instance, image_meta, [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self._vmops.spawn(context, instance, image_meta, injected_files, [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] vm_ref = self.build_virtual_machine(instance, [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] vif_infos = vmwarevif.get_vif_info(self._session, [ 935.247289] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] for vif in network_info: [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return self._sync_wrapper(fn, *args, **kwargs) [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.wait() [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self[:] = self._gt.wait() [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return self._exit_event.wait() [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] current.throw(*self._exc) [ 935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
935.247690] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] result = function(*args, **kwargs) [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] return func(*args, **kwargs) [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise e [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] nwinfo = self.network_api.allocate_for_instance( [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] created_port_ids = self._update_ports_for_instance( [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] with excutils.save_and_reraise_exception(): [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] self.force_reraise() [ 935.248090] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise self.value [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] updated_port = self._update_port( [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] _ensure_no_port_binding_failure(port) [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] raise exception.PortBindingFailed(port_id=port['id']) [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] nova.exception.PortBindingFailed: Binding failed for 
port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. [ 935.248465] env[62109]: ERROR nova.compute.manager [instance: bab79bb6-1638-4eee-812d-da1372134873] [ 935.248465] env[62109]: DEBUG nova.compute.utils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 935.249608] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.977s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.251135] env[62109]: INFO nova.compute.claims [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.252815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 18472a0c8607488a8179161d49e38751 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.260112] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Build of instance bab79bb6-1638-4eee-812d-da1372134873 was re-scheduled: Binding failed for port 711f1545-3683-4e94-8275-4892d964343b, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 935.260112] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 935.260112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.260112] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.260112] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 935.262263] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg cc35bbc010bb413c91f4576bf2b2e201 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.268559] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc35bbc010bb413c91f4576bf2b2e201 [ 935.312932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18472a0c8607488a8179161d49e38751 [ 935.453909] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452773} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.454360] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 935.454746] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.455130] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b38a474-d7e5-4861-b3d1-039a9ac39171 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.461242] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 935.461242] env[62109]: value = "task-401537" [ 935.461242] env[62109]: _type = "Task" [ 935.461242] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.470818] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401537, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.678804] env[62109]: INFO nova.compute.manager [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] [instance: 87dff872-a469-465f-9c74-4524a2eab013] Took 1.02 seconds to deallocate network for instance. [ 935.680972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 2af1b42c23eb4667bbee12672e0c39b7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.717410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2af1b42c23eb4667bbee12672e0c39b7 [ 935.762579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 19b61a73b9d94af187ca31b2f0df875f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.769676] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19b61a73b9d94af187ca31b2f0df875f [ 935.785007] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 935.863864] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.864567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 3fd09ff798664f069e2edd62f409643e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 935.873591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fd09ff798664f069e2edd62f409643e [ 935.971428] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064174} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.972096] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.972974] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4d3cef-927c-4a92-a794-9884e05a50f3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.999165] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.999642] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa3622a4-8cc0-47f3-ad48-5dec316476a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.021008] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 936.021008] env[62109]: value = "task-401538" [ 936.021008] env[62109]: _type = "Task" [ 936.021008] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.030462] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401538, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.186868] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 62f979842e054db692dba6aad7e84147 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.228754] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62f979842e054db692dba6aad7e84147 [ 936.373678] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-bab79bb6-1638-4eee-812d-da1372134873" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.374212] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 936.374505] env[62109]: DEBUG nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 936.374808] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 936.389151] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.389806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9e8807250828408680c2eb1973828489 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.400484] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e8807250828408680c2eb1973828489 [ 936.411452] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0633b8-c03c-449c-a273-882191173454 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.419913] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a013ab9-d14e-479d-8eb5-c4b45fda9f24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.450313] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1e18e-9293-40c7-96a5-9a7037f25f82 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.457287] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b13637b-c1ea-492c-b366-b813199ce3ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.470689] env[62109]: DEBUG nova.compute.provider_tree [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.471339] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 80be294298fa4231a43a3fcb430cf93a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.481231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80be294298fa4231a43a3fcb430cf93a [ 936.531369] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401538, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.742586] env[62109]: INFO nova.scheduler.client.report [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Deleted allocations for instance 87dff872-a469-465f-9c74-4524a2eab013 [ 936.751608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Expecting reply to msg 090b2f257d7b4208825539c46e7674ee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.763616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 090b2f257d7b4208825539c46e7674ee [ 936.898310] env[62109]: DEBUG nova.network.neutron [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.898847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 427f57f9e6404141a04b9ed43d769b57 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.907166] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 427f57f9e6404141a04b9ed43d769b57 [ 936.973731] env[62109]: DEBUG nova.scheduler.client.report [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.976084] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg e374300d62664804a4f738bb1863e3b2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 936.986480] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e374300d62664804a4f738bb1863e3b2 [ 937.031810] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401538, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.254776] env[62109]: DEBUG oslo_concurrency.lockutils [None req-4a9a2bdf-5492-4120-8c29-b5c2133cce17 tempest-AttachInterfacesTestJSON-1305547194 tempest-AttachInterfacesTestJSON-1305547194-project-member] Lock "87dff872-a469-465f-9c74-4524a2eab013" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.033s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.255408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1ddb34eabd394f62b20eefbd322b45bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.273136] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ddb34eabd394f62b20eefbd322b45bb [ 937.408391] env[62109]: INFO nova.compute.manager [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: bab79bb6-1638-4eee-812d-da1372134873] Took 1.03 seconds to deallocate network for instance. [ 937.410217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg d695ed53e301472e8bbab186a8702fff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.459025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d695ed53e301472e8bbab186a8702fff [ 937.479261] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.479831] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 937.482135] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 477a1aa91c8d49948d804de679a2e189 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.483336] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.359s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.485181] env[62109]: INFO nova.compute.claims [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.487012] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 64d0a1beca0944f5bc7de18209c0d869 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.522625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 477a1aa91c8d49948d804de679a2e189 [ 937.528675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d0a1beca0944f5bc7de18209c0d869 [ 937.532704] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401538, 'name': ReconfigVM_Task, 'duration_secs': 1.259862} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.532973] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.533593] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c975f24-6435-46c1-9b08-485868316f2f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.539984] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 937.539984] env[62109]: value = "task-401540" [ 937.539984] env[62109]: _type = "Task" [ 937.539984] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.548243] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401540, 'name': Rename_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.762950] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 937.764816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg e13442d363c4410a87668de238a9e156 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.801622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e13442d363c4410a87668de238a9e156 [ 937.916373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5818366a601e442188eb99565a98bdc8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.965671] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5818366a601e442188eb99565a98bdc8 [ 937.996731] env[62109]: DEBUG nova.compute.utils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.997383] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 174dbbbaa0634eb0b2c6933d17d720de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 937.999744] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 10a30837e3224c50a2a13487ef235163 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 938.004974] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 938.004974] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 938.014006] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174dbbbaa0634eb0b2c6933d17d720de [ 938.014903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10a30837e3224c50a2a13487ef235163 [ 938.049341] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401540, 'name': Rename_Task, 'duration_secs': 0.218284} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.049613] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 938.050562] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb7d4ba6-de28-4cc3-84db-bb361d09d111 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.058502] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 938.058502] env[62109]: value = "task-401541" [ 938.058502] env[62109]: _type = "Task" [ 938.058502] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.072126] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401541, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.076319] env[62109]: DEBUG nova.policy [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de2402e819b745498f3b594ac0c25d69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c386f1cfdf24c9ab1ab01f5a00ed743', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 938.289523] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.443322] env[62109]: INFO nova.scheduler.client.report [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance bab79bb6-1638-4eee-812d-da1372134873 [ 938.449780] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5c38e537c7b74d2792a76d60743dcb7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 938.466231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c38e537c7b74d2792a76d60743dcb7f [ 938.505894] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 938.507682] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 97af64ea6129440ea375c58b549ae588 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 938.544686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97af64ea6129440ea375c58b549ae588 [ 938.570711] env[62109]: DEBUG oslo_vmware.api [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401541, 'name': PowerOnVM_Task, 'duration_secs': 0.490592} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.570974] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 938.571173] env[62109]: INFO nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Took 5.80 seconds to spawn the instance on the hypervisor. [ 938.571351] env[62109]: DEBUG nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.572187] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30439e3-a948-4459-bd5a-6f6b3f3f1c71 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.581941] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 30f0f28efba3442994e6f2fd8750807a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 938.621029] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Successfully created port: 803e3dd5-6dbb-437e-931c-d3616c54eafb {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.623405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30f0f28efba3442994e6f2fd8750807a [ 938.645543] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7967c349-c9e2-4837-976a-f4d66f7ea1a2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.653127] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2fdc49-9174-4a89-805f-e5a336a9e817 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.681928] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e36dab-280b-48f1-86eb-4abf14d8ab55 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.689396] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444e28d6-215d-49d3-8446-085bb0971010 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.707793] env[62109]: DEBUG nova.compute.provider_tree [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 
5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.708374] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg bc6652d58cb64c5c95cb93c4efc3c2e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 938.715477] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc6652d58cb64c5c95cb93c4efc3c2e6 [ 938.952101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-d9f75258-1b52-46c2-9683-603460492b4b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "bab79bb6-1638-4eee-812d-da1372134873" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.052s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.014612] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 1dddec23733b467ca41460735266e287 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.058940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dddec23733b467ca41460735266e287 [ 939.094913] env[62109]: INFO nova.compute.manager [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Took 31.26 seconds to build instance. [ 939.095257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 22993c284dd743abaa9e4a6f1e55a2b7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.106485] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22993c284dd743abaa9e4a6f1e55a2b7 [ 939.211348] env[62109]: DEBUG nova.scheduler.client.report [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.214280] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2d3f84df19ba4ba99f0ec4e599b0b54f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.253967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d3f84df19ba4ba99f0ec4e599b0b54f [ 939.307078] env[62109]: DEBUG nova.compute.manager [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] [instance: 
8bd1a8aa-844b-47ca-9296-0c30af695984] Received event network-changed-803e3dd5-6dbb-437e-931c-d3616c54eafb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 939.307257] env[62109]: DEBUG nova.compute.manager [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Refreshing instance network info cache due to event network-changed-803e3dd5-6dbb-437e-931c-d3616c54eafb. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 939.307473] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] Acquiring lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.307604] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] Acquired lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.307757] env[62109]: DEBUG nova.network.neutron [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Refreshing network info cache for port 803e3dd5-6dbb-437e-931c-d3616c54eafb {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 939.308259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] Expecting reply to msg fe97fd08ec30484bbd9212d56695f2fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.316640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe97fd08ec30484bbd9212d56695f2fd [ 939.522691] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 939.543219] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 939.543514] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 939.543726] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.543981] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 939.544180] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.544332] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 939.544544] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 939.544695] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 939.544852] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed 
tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 939.545011] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 939.545174] env[62109]: DEBUG nova.virt.hardware [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 939.546024] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8414fa7-acb8-492a-91df-22f94dd904fd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.554274] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b90aa5-6cdc-499a-8fee-c17018db9f33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.596765] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5213cfec-48e3-4e2b-9180-e27103bafb9a tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.295s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.623708] env[62109]: ERROR nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. 
[ 939.623708] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 939.623708] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 939.623708] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 939.623708] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.623708] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.623708] env[62109]: ERROR nova.compute.manager raise self.value [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 939.623708] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 939.623708] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 939.623708] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 939.624247] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 939.624247] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 939.624247] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. 
[ 939.624247] env[62109]: ERROR nova.compute.manager [ 939.624247] env[62109]: Traceback (most recent call last): [ 939.624247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 939.624247] env[62109]: listener.cb(fileno) [ 939.624247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 939.624247] env[62109]: result = function(*args, **kwargs) [ 939.624247] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 939.624247] env[62109]: return func(*args, **kwargs) [ 939.624247] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 939.624247] env[62109]: raise e [ 939.624247] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 939.624247] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 939.624247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 939.624247] env[62109]: created_port_ids = self._update_ports_for_instance( [ 939.624247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 939.624247] env[62109]: with excutils.save_and_reraise_exception(): [ 939.624247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.624247] env[62109]: self.force_reraise() [ 939.624247] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.624247] env[62109]: raise self.value [ 939.624247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 939.624247] env[62109]: updated_port = self._update_port( [ 939.624247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 939.624247] env[62109]: _ensure_no_port_binding_failure(port) [ 939.624247] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 939.624247] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 939.626480] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. [ 939.626480] env[62109]: Removing descriptor: 19 [ 939.626480] env[62109]: ERROR nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. 
[ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Traceback (most recent call last): [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] yield resources [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.driver.spawn(context, instance, image_meta, [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 939.626480] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] vm_ref = self.build_virtual_machine(instance, [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] vif_infos = vmwarevif.get_vif_info(self._session, [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] for vif in network_info: [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self._sync_wrapper(fn, *args, **kwargs) [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.wait() [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self[:] = self._gt.wait() [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self._exit_event.wait() [ 939.627290] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 939.627671] env[62109]: ERROR 
nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] result = hub.switch() [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self.greenlet.switch() [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] result = function(*args, **kwargs) [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return func(*args, **kwargs) [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise e [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] nwinfo = self.network_api.allocate_for_instance( [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 939.627671] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] created_port_ids = self._update_ports_for_instance( [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] with excutils.save_and_reraise_exception(): [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.force_reraise() [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise self.value [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] updated_port = self._update_port( [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 939.628047] 
env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] _ensure_no_port_binding_failure(port) [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 939.628047] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise exception.PortBindingFailed(port_id=port['id']) [ 939.628364] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. [ 939.628364] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] [ 939.628364] env[62109]: INFO nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Terminating instance [ 939.628364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquiring lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.720113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.720113] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 939.720113] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 71db48ead73e42fdb58034b612c38de0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.728405] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.871s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.729760] env[62109]: INFO nova.compute.claims [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.731439] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 8604cc7d1c254ed59bc1bfd55c5ba6d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.766229] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71db48ead73e42fdb58034b612c38de0 [ 939.771947] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8604cc7d1c254ed59bc1bfd55c5ba6d2 [ 939.833643] env[62109]: DEBUG nova.network.neutron [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.974822] env[62109]: DEBUG nova.network.neutron [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.975428] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] Expecting reply to msg d5073a5b54db4ac09be5bc389eac79b1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 939.983878] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5073a5b54db4ac09be5bc389eac79b1 [ 940.114350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg b803885a95b247efbf0872f6d07e59ae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.123712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b803885a95b247efbf0872f6d07e59ae [ 940.236082] env[62109]: DEBUG nova.compute.utils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 940.236818] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d355405a73114ea0a3bbc50a003e0551 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.239575] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 31ce77ee7a6c48409bdc27b55a59c6f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.239661] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 940.239780] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 940.247211] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ce77ee7a6c48409bdc27b55a59c6f3 [ 940.251960] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d355405a73114ea0a3bbc50a003e0551 [ 940.295131] env[62109]: DEBUG nova.policy [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 940.449033] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquiring lock "49a0249a-f322-47f6-b723-2af2b701902c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.449263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "49a0249a-f322-47f6-b723-2af2b701902c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.449716] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 91a63cfe2d33456ba3e652206d51fd46 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.458997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91a63cfe2d33456ba3e652206d51fd46 [ 940.477412] env[62109]: DEBUG oslo_concurrency.lockutils [req-1d5941f4-431f-4390-9aa0-49ba842d1cc3 req-6d096261-3a13-4a1e-ab10-6b615691d9a3 service nova] Releasing lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.477798] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquired lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
940.477977] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 940.479191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 9d32fec1ad3d42c2b69c50398425c80d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.485189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d32fec1ad3d42c2b69c50398425c80d [ 940.617370] env[62109]: INFO nova.compute.manager [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Rebuilding instance [ 940.648506] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Successfully created port: 09fb75f7-ed62-435c-bde1-9bcbab653d49 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.658687] env[62109]: DEBUG nova.compute.manager [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 940.659587] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddbab4a-2afa-4318-beca-129b76e369e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.667288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg e7bc5c8cdc56430a8380e643549be014 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.698823] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7bc5c8cdc56430a8380e643549be014 [ 940.743213] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 940.744924] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg b56346af03184fd3a2bfe3143a175b3d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.784655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b56346af03184fd3a2bfe3143a175b3d [ 940.952030] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 940.954345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg afa53479b2564846a9d7b68573c70f93 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.981160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7c0c71-eb84-443f-a6cb-d34a3bf2a69c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.992980] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.993227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.993683] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ae606877941549bcbeccbec143958515 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 940.995208] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0401e8d7-e158-40c6-8493-b78a3cb58aa5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.027562] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.029806] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae606877941549bcbeccbec143958515 [ 941.030302] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afa53479b2564846a9d7b68573c70f93 [ 941.031128] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1fb027-58d9-4cbf-b02b-4193e6c1460d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.040216] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba0a4d-e78c-44e2-ae3e-f9a477474f74 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.054067] env[62109]: DEBUG nova.compute.provider_tree [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.054564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 849675a98ce54a6dbf54716542f61896 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.061578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 849675a98ce54a6dbf54716542f61896 [ 941.116703] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.117246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 5aa3ba7499f24cbe80e79ce09631e20b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.129305] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aa3ba7499f24cbe80e79ce09631e20b [ 941.170409] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 941.170704] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dae7a6a-9b73-4a0e-8b15-af502f96ec17 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.177829] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 941.177829] env[62109]: value = "task-401543" [ 941.177829] env[62109]: _type = "Task" [ 941.177829] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.186021] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.249784] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 81dfed0c06614f3eba0194753bad2fa1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.291357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81dfed0c06614f3eba0194753bad2fa1 [ 941.344538] env[62109]: DEBUG nova.compute.manager [req-0234ca5a-4029-4013-83b8-6e009b6b10fb req-4213ddc3-4edc-4239-839f-fe4adc64181f service nova] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Received event network-vif-deleted-803e3dd5-6dbb-437e-931c-d3616c54eafb {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 941.480233] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.499952] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 941.501800] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a81ab67dd3594987ba8ae5e1341343c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.538589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a81ab67dd3594987ba8ae5e1341343c9 [ 941.559757] env[62109]: DEBUG nova.scheduler.client.report [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.559757] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 08c71ad65c184f08833457c1493ae891 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.572826] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08c71ad65c184f08833457c1493ae891 [ 941.592624] env[62109]: ERROR nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. 
[ 941.592624] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 941.592624] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 941.592624] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 941.592624] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 941.592624] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 941.592624] env[62109]: ERROR nova.compute.manager raise self.value [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 941.592624] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 941.592624] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 941.592624] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 941.593135] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 941.593135] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 941.593135] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. 
[ 941.593135] env[62109]: ERROR nova.compute.manager [ 941.593135] env[62109]: Traceback (most recent call last): [ 941.593135] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 941.593135] env[62109]: listener.cb(fileno) [ 941.593135] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 941.593135] env[62109]: result = function(*args, **kwargs) [ 941.593135] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 941.593135] env[62109]: return func(*args, **kwargs) [ 941.593135] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 941.593135] env[62109]: raise e [ 941.593135] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 941.593135] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 941.593135] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 941.593135] env[62109]: created_port_ids = self._update_ports_for_instance( [ 941.593710] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 941.593710] env[62109]: with excutils.save_and_reraise_exception(): [ 941.593710] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 941.593710] env[62109]: self.force_reraise() [ 941.593710] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 941.593710] env[62109]: raise self.value [ 941.593710] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 941.593710] env[62109]: updated_port = self._update_port( [ 941.593710] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 941.593710] env[62109]: _ensure_no_port_binding_failure(port) [ 941.593710] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 941.593710] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 941.593710] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. [ 941.593710] env[62109]: Removing descriptor: 19 [ 941.620113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Releasing lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.620574] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.620776] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.621071] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16170f34-7dbe-4c9d-9c8d-2b1916afa8b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.629388] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd5ed6f-68db-4240-b5a9-392f79fb84ce {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.652594] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8bd1a8aa-844b-47ca-9296-0c30af695984 could not be found. [ 941.653447] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.653447] env[62109]: INFO nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Took 0.03 seconds to destroy the instance on the hypervisor. [ 941.653447] env[62109]: DEBUG oslo.service.loopingcall [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.653619] env[62109]: DEBUG nova.compute.manager [-] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 941.653773] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.668983] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 941.669525] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1e08d0e80e62493c9d7f9733872b7fe8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.676658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e08d0e80e62493c9d7f9733872b7fe8 [ 941.688391] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401543, 'name': PowerOffVM_Task, 'duration_secs': 0.204153} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.688722] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 941.688994] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.689772] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af802fb-a9c9-4278-bdff-3e535946cf49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.695775] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.696064] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-561659c4-e57a-405b-a778-2b7793ba65bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.716000] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.717018] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.717018] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Deleting the datastore file [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.717018] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-007f8111-5116-47a5-97fa-02efef78e99f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.722577] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 941.722577] env[62109]: value = "task-401546" [ 941.722577] env[62109]: _type = "Task" [ 941.722577] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.730072] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.752687] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 941.779773] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.780136] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.780379] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.781706] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.781921] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 
{{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.786444] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.786741] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.786970] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.787204] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.787433] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.787711] env[62109]: DEBUG nova.virt.hardware [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.788683] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa78c18-27f0-4996-a7cf-ca420dccb75d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.799568] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49e6272-2f57-4a39-a4e2-6483a556a4dc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.817055] env[62109]: ERROR nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. 
[ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Traceback (most recent call last): [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] yield resources [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.driver.spawn(context, instance, image_meta, [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] vm_ref = self.build_virtual_machine(instance, [ 941.817055] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] for vif in network_info: [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return self._sync_wrapper(fn, *args, **kwargs) [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.wait() [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self[:] = self._gt.wait() [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return self._exit_event.wait() [ 941.817469] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 941.817469] env[62109]: ERROR 
nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] current.throw(*self._exc) [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] result = function(*args, **kwargs) [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return func(*args, **kwargs) [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise e [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] nwinfo = self.network_api.allocate_for_instance( [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] created_port_ids = self._update_ports_for_instance( [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] with excutils.save_and_reraise_exception(): [ 941.817848] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.force_reraise() [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise self.value [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] updated_port = self._update_port( [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] _ensure_no_port_binding_failure(port) [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise exception.PortBindingFailed(port_id=port['id']) [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. [ 941.818514] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] [ 941.818514] env[62109]: INFO nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Terminating instance [ 941.820305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.820548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.820772] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.821247] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 0c8273156cfa442384b513029e4f32c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 941.827837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c8273156cfa442384b513029e4f32c4 [ 942.019356] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.062725] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.063293] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 942.065603] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 7ba90dec5e644b62b47a78326c98aca1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.067027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.434s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.069217] env[62109]: INFO nova.compute.claims [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.070857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 1836a68f1c03428ca08fd6cdf8dc9f22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.096644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ba90dec5e644b62b47a78326c98aca1 [ 942.104829] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1836a68f1c03428ca08fd6cdf8dc9f22 [ 942.171465] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.172431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8fa6e5ae00a5442090b823fcd7c9a27f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.179961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fa6e5ae00a5442090b823fcd7c9a27f [ 942.236060] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109772} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.236556] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.236909] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 942.237237] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.240308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg f878613705764bebb106cbd193f48c86 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.300149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f878613705764bebb106cbd193f48c86 [ 942.337868] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.438726] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.439228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg baab211fb6724d01a786d9ccbf8dfc41 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.447497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baab211fb6724d01a786d9ccbf8dfc41 [ 942.573588] env[62109]: DEBUG nova.compute.utils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.574250] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg ef5c401ec5ef4515947fd9d4eee0da21 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.576433] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 3f79968f8cd047cca3fd45f8c1478f10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.577419] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 942.577659] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 942.583910] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef5c401ec5ef4515947fd9d4eee0da21 [ 942.586643] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f79968f8cd047cca3fd45f8c1478f10 [ 942.657152] env[62109]: DEBUG nova.policy [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28c774de080644e8979aad3cdb0c4400', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e2b950a9c8f42ffbe0d4ced7a3f289c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 942.674354] env[62109]: INFO nova.compute.manager [-] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Took 1.02 seconds to deallocate network for instance. [ 942.676819] env[62109]: DEBUG nova.compute.claims [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 942.677149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.745832] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 6b0d4e1eb6954620a0fd5ab9db62426e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 942.773079] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b0d4e1eb6954620a0fd5ab9db62426e [ 942.941340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.941795] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 942.941990] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 942.942340] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da84e5bf-df54-4488-8427-e6640a228538 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.952531] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af754bcc-ca84-4ab1-bf60-fe47e0734f33 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.976360] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27fed863-1e27-4258-8b43-b8cd23e3c1c0 could not be found. [ 942.976712] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.976999] env[62109]: INFO nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 942.977496] env[62109]: DEBUG oslo.service.loopingcall [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.977812] env[62109]: DEBUG nova.compute.manager [-] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 942.978035] env[62109]: DEBUG nova.network.neutron [-] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.992724] env[62109]: DEBUG nova.network.neutron [-] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.993335] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 94619e1626364baab2f077a353505279 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.000508] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94619e1626364baab2f077a353505279 [ 943.062392] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Successfully created port: ff125b21-2e96-4688-8b92-5ff1f1891e83 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.078133] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 943.079932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 5f29118748fd4068909ee53472ffa281 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.123410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f29118748fd4068909ee53472ffa281 [ 943.272478] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ceff85-50b1-4b83-9561-075e262179b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.277377] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.278058] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.278185] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 943.278409] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.278589] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.278857] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.279079] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.279279] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.279465] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.279661] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.282516] env[62109]: DEBUG nova.virt.hardware [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.286421] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c056c4df-dd79-4f13-bc20-302ca93acd49 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.293911] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8027368-5488-4128-9508-44c20abf1c56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.297792] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01c7ae2-ee7b-4f3e-9533-eeab88c8c2d5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.311040] 
env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.317084] env[62109]: DEBUG oslo.service.loopingcall [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.342413] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 943.342786] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36d42ebc-3e52-411f-a7fc-f88b6b905ab3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.357039] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9894afea-3f20-4017-91a4-2712d36cc14b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.366739] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216256e4-6eb0-4dcb-b344-a7ad6a1a4158 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.372307] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.372307] env[62109]: value = "task-401547" [ 943.372307] env[62109]: _type = "Task" [ 943.372307] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.384110] env[62109]: DEBUG nova.compute.provider_tree [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.384624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg c275321404244a84849867d9ef12f730 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.386405] env[62109]: DEBUG nova.compute.manager [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Received event network-changed-09fb75f7-ed62-435c-bde1-9bcbab653d49 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 943.386549] env[62109]: DEBUG nova.compute.manager [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Refreshing instance network info cache due to event network-changed-09fb75f7-ed62-435c-bde1-9bcbab653d49. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 943.386805] env[62109]: DEBUG oslo_concurrency.lockutils [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] Acquiring lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.387000] env[62109]: DEBUG oslo_concurrency.lockutils [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] Acquired lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.387150] env[62109]: DEBUG nova.network.neutron [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Refreshing network info cache for port 09fb75f7-ed62-435c-bde1-9bcbab653d49 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 943.387524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] Expecting reply to msg 2d1f41ec057e42928b264f03bed3c73d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.398907] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401547, 'name': CreateVM_Task} progress is 15%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.399414] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c275321404244a84849867d9ef12f730 [ 943.399847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d1f41ec057e42928b264f03bed3c73d [ 943.414947] env[62109]: DEBUG nova.network.neutron [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 943.495618] env[62109]: DEBUG nova.network.neutron [-] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.496135] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1484dd32abe643edb6324a7596d34bf3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.512598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1484dd32abe643edb6324a7596d34bf3 [ 943.512598] env[62109]: DEBUG nova.network.neutron [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.512598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] Expecting reply to msg 608e70e8d9604c9f96496357ce793055 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.521567] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 608e70e8d9604c9f96496357ce793055 [ 943.585304] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 699b2ef36756470baa76b143c6e29661 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.622863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 699b2ef36756470baa76b143c6e29661 [ 943.880650] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401547, 'name': CreateVM_Task, 'duration_secs': 0.448201} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.880828] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 943.881236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.881399] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.881720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.881971] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-680d1905-8569-4402-9e66-0e390b94255e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.886170] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 943.886170] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52c1fe91-b141-97ce-2642-f0dc60d6c5b9" [ 943.886170] env[62109]: _type = "Task" [ 943.886170] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.889795] env[62109]: DEBUG nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.892083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 41ac71e05fa84512bf22310c69cf1052 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 943.898605] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52c1fe91-b141-97ce-2642-f0dc60d6c5b9, 'name': SearchDatastore_Task, 'duration_secs': 0.008514} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.898869] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.899100] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.899319] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.899460] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.899795] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.899863] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-261bd5fe-2728-4a47-9f9e-87ed8a832c84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.903255] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41ac71e05fa84512bf22310c69cf1052 [ 943.907343] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.907514] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 943.908246] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5273fe27-3dfe-4159-8683-2489cf3b15a9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.913685] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 943.913685] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]526207e2-4e99-9529-91f8-4ec5ee64acdd" [ 943.913685] env[62109]: _type = "Task" [ 943.913685] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.921683] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]526207e2-4e99-9529-91f8-4ec5ee64acdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.998174] env[62109]: INFO nova.compute.manager [-] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Took 1.02 seconds to deallocate network for instance. 
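The PortBindingFailed tracebacks recorded above for ports 09fb75f7-ed62-435c-bde1-9bcbab653d49 and ff125b21-2e96-4688-8b92-5ff1f1891e83 all terminate in nova/network/neutron.py, where _ensure_no_port_binding_failure aborts the spawn once Neutron reports that the port binding failed; the compute manager then deallocates the network and aborts the resource claim, as the surrounding entries show. The following is a minimal, self-contained Python sketch of that check for illustration only; the PortBindingFailed class and the plain dict standing in for the Neutron port are simplified assumptions, not Nova's actual classes or API.

    # Illustrative sketch only: simplified stand-ins, not the Nova source.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with
        # binding:vif_type == 'binding_failed'; raising here lets the compute
        # manager abort the spawn, deallocate networking, and drop the claim.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Usage mirroring the failure logged for instance 27fed863-1e27-4258-8b43-b8cd23e3c1c0:
    port = {'id': '09fb75f7-ed62-435c-bde1-9bcbab653d49',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)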
[ 944.000679] env[62109]: DEBUG nova.compute.claims [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 944.000998] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.013690] env[62109]: DEBUG oslo_concurrency.lockutils [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] Releasing lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.014051] env[62109]: DEBUG nova.compute.manager [req-43883c39-31cf-44a0-af75-9f6700269752 req-53b43b23-11b2-4f15-a331-f3c09691586c service nova] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Received event network-vif-deleted-09fb75f7-ed62-435c-bde1-9bcbab653d49 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 944.022175] env[62109]: ERROR nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. 
[ 944.022175] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 944.022175] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 944.022175] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 944.022175] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 944.022175] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 944.022175] env[62109]: ERROR nova.compute.manager raise self.value [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 944.022175] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 944.022175] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 944.022175] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 944.022689] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 944.022689] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 944.022689] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. 
[ 944.022689] env[62109]: ERROR nova.compute.manager [ 944.023026] env[62109]: Traceback (most recent call last): [ 944.023138] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 944.023138] env[62109]: listener.cb(fileno) [ 944.023219] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 944.023219] env[62109]: result = function(*args, **kwargs) [ 944.023293] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 944.023293] env[62109]: return func(*args, **kwargs) [ 944.023365] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 944.023365] env[62109]: raise e [ 944.023624] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 944.023624] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 944.023709] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 944.023709] env[62109]: created_port_ids = self._update_ports_for_instance( [ 944.023782] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 944.023782] env[62109]: with excutils.save_and_reraise_exception(): [ 944.023855] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 944.023855] env[62109]: self.force_reraise() [ 944.023927] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 944.023927] env[62109]: raise self.value [ 944.024000] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 944.024000] env[62109]: updated_port = self._update_port( [ 944.024117] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 944.024117] env[62109]: _ensure_no_port_binding_failure(port) [ 944.024210] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 944.024210] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 944.024299] env[62109]: nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. [ 944.024353] env[62109]: Removing descriptor: 19 [ 944.088174] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 944.108272] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 944.108511] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 944.108665] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.108841] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 944.108983] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.109127] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 944.109325] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 944.109476] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 944.109668] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 944.109795] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 944.109953] env[62109]: DEBUG nova.virt.hardware [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 944.112082] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0638df-68ad-4c62-8a49-f247b13f381d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.118320] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da5a90c-b96b-4606-b55e-a32181b06f56 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.131749] env[62109]: ERROR nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. 
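
The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal sketch of what that guard amounts to, reconstructed from the frames shown in the log rather than copied from the Nova tree: Neutron marks a failed binding by setting the port's binding:vif_type to binding_failed, and Nova turns that into PortBindingFailed so the build aborts instead of spawning a VM with an unusable VIF.

    # Illustrative sketch only, reconstructed from the traceback above; the names
    # PortBindingFailed and 'binding:vif_type' follow Nova/Neutron conventions,
    # but this is not the code from the Nova tree.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports on a failed binding


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id


    def _ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron reported a failed binding."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Example: the port Neutron returned for the update in the log above
    port = {'id': 'ff125b21-2e96-4688-8b92-5ff1f1891e83',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message as the log record above

The exception then propagates up through _update_port, _update_ports_for_instance and the greenthread wait, which is why the same frames appear twice: once from _allocate_network_async and once when spawn iterates network_info.
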
[ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Traceback (most recent call last): [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] yield resources [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.driver.spawn(context, instance, image_meta, [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] vm_ref = self.build_virtual_machine(instance, [ 944.131749] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] vif_infos = vmwarevif.get_vif_info(self._session, [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] for vif in network_info: [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return self._sync_wrapper(fn, *args, **kwargs) [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.wait() [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self[:] = self._gt.wait() [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return self._exit_event.wait() [ 944.132482] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 944.132482] env[62109]: ERROR 
nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] current.throw(*self._exc) [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] result = function(*args, **kwargs) [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return func(*args, **kwargs) [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise e [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] nwinfo = self.network_api.allocate_for_instance( [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] created_port_ids = self._update_ports_for_instance( [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] with excutils.save_and_reraise_exception(): [ 944.133172] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.force_reraise() [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise self.value [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] updated_port = self._update_port( [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] _ensure_no_port_binding_failure(port) [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise exception.PortBindingFailed(port_id=port['id']) [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. [ 944.133753] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] [ 944.133753] env[62109]: INFO nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Terminating instance [ 944.134338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquiring lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.134338] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquired lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.134338] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.134689] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg a93493b89af04d469becd9e1a792cacf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.141225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a93493b89af04d469becd9e1a792cacf [ 944.395305] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.395790] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 944.397540] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg b06faaf5700a408492b67982665ff35b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.398946] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.024s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.400300] env[62109]: INFO nova.compute.claims [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.401754] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 64e616eb9506468fa716fbeb34e80346 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.425190] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]526207e2-4e99-9529-91f8-4ec5ee64acdd, 'name': SearchDatastore_Task, 'duration_secs': 0.008066} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.426293] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f32c757-8dc2-4bb2-ab0c-2ba3a1ab39b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.428814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b06faaf5700a408492b67982665ff35b [ 944.432290] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 944.432290] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52a7b6d3-570c-f11b-a6fa-fdcbfa64043b" [ 944.432290] env[62109]: _type = "Task" [ 944.432290] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.440131] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52a7b6d3-570c-f11b-a6fa-fdcbfa64043b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.441041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64e616eb9506468fa716fbeb34e80346 [ 944.651571] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.729714] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.730246] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 16567aad65604af8af62deb247c16e02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.740362] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16567aad65604af8af62deb247c16e02 [ 944.905608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d3c7d8eb599349ee90b6f1a80b6a23f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.907250] env[62109]: DEBUG nova.compute.utils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.907822] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d9b74d65332546759716110db95a5448 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 944.909216] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 944.909391] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 944.913210] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3c7d8eb599349ee90b6f1a80b6a23f3 [ 944.917637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9b74d65332546759716110db95a5448 [ 944.944274] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52a7b6d3-570c-f11b-a6fa-fdcbfa64043b, 'name': SearchDatastore_Task, 'duration_secs': 0.012523} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.944935] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.944935] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 944.945156] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ab04a16-f745-4531-a812-ff8a4761d6d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.948797] env[62109]: DEBUG nova.policy [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feae790f4343445c86cfb1b39cb9636e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e10657828d9480b948e59b98490572b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 944.951778] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 944.951778] env[62109]: value = "task-401549" [ 944.951778] env[62109]: _type = "Task" [ 944.951778] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.959361] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401549, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.232720] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Releasing lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.232974] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 945.233302] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 945.234179] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d33993f-8daa-4c75-8db1-cdf4ca8700bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.243701] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edee66a1-8d0e-4410-83d6-59a0a8f69d45 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.257748] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Successfully created port: c678b72f-ea04-4809-afb1-fe2f0b013d2b {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.273731] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 228d2a6d-6c16-472c-9326-2e4576d9648c could not be found. 
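
The cleanup path here logs a WARNING "Instance does not exist on backend: InstanceNotFound" and then immediately reports "Instance destroyed": because port binding failed before any VM was created in vCenter, destroy treats a missing backend VM as already cleaned up. A hedged sketch of that pattern; find_vm_ref and destroy_vm are hypothetical placeholders for illustration, not Nova APIs.

    # Sketch of "destroy is a no-op if the VM never materialised", as seen in the
    # log. find_vm_ref / destroy_vm are hypothetical callables, not Nova APIs.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        def __init__(self, instance_id):
            super().__init__(f"Instance {instance_id} could not be found.")


    def destroy(instance_uuid, find_vm_ref, destroy_vm):
        """Destroy the backend VM, tolerating the case where it never existed."""
        try:
            vm_ref = find_vm_ref(instance_uuid)  # e.g. a UUID lookup in vCenter
            destroy_vm(vm_ref)
        except InstanceNotFound as exc:
            # Spawn failed before the VM was created, so there is nothing to delete.
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")


    def _missing(uuid):
        raise InstanceNotFound(uuid)

    destroy('228d2a6d-6c16-472c-9326-2e4576d9648c', _missing, lambda ref: None)
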
[ 945.273731] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 945.273731] env[62109]: INFO nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 945.273731] env[62109]: DEBUG oslo.service.loopingcall [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.274177] env[62109]: DEBUG nova.compute.manager [-] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 945.274325] env[62109]: DEBUG nova.network.neutron [-] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.291688] env[62109]: DEBUG nova.network.neutron [-] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.292381] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2b004ba960f64d59b52b35b1633201eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.300082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b004ba960f64d59b52b35b1633201eb [ 945.406965] env[62109]: DEBUG nova.compute.manager [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Received event network-changed-ff125b21-2e96-4688-8b92-5ff1f1891e83 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 945.407272] env[62109]: DEBUG nova.compute.manager [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Refreshing instance network info cache due to event network-changed-ff125b21-2e96-4688-8b92-5ff1f1891e83. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 945.408077] env[62109]: DEBUG oslo_concurrency.lockutils [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] Acquiring lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.408289] env[62109]: DEBUG oslo_concurrency.lockutils [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] Acquired lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.408464] env[62109]: DEBUG nova.network.neutron [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Refreshing network info cache for port ff125b21-2e96-4688-8b92-5ff1f1891e83 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.408903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] Expecting reply to msg f30f080bc86b4ad589782dec68617630 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.421143] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 945.421143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg a331cfd201d54afcaa804d01a282f237 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.421143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f30f080bc86b4ad589782dec68617630 [ 945.458577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a331cfd201d54afcaa804d01a282f237 [ 945.463246] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401549, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448156} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.466321] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 945.466731] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.467447] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee5e81b4-f8b0-450d-b63f-239a9d883d98 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.474108] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 945.474108] env[62109]: value = "task-401550" [ 945.474108] env[62109]: _type = "Task" [ 945.474108] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.487860] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401550, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.583781] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f668eb5d-052d-4e0d-b004-4dcc6a261a14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.593335] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2de76c-3226-4a08-b32c-7595ce8ee1bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.627323] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2f1e1f-88cc-43b2-85d9-534810f1a02c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.634542] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a6f76b-b3f2-4635-ada1-24ceb449273e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.647160] env[62109]: DEBUG nova.compute.provider_tree [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.647686] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 891e6ce994df453aa7f854e78a030d5a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.654878] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 891e6ce994df453aa7f854e78a030d5a [ 945.795781] env[62109]: DEBUG nova.network.neutron [-] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.796390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0e93b106306c4353b2881cfed7a7bd7a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.804884] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e93b106306c4353b2881cfed7a7bd7a [ 945.924918] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 09cf6c63f42841ff90f9065f18c01723 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 945.934239] env[62109]: DEBUG nova.network.neutron [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.962053] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09cf6c63f42841ff90f9065f18c01723 [ 945.985025] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401550, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06318} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.985284] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.986035] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c34c505-b9f2-4185-a392-736933391d2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.006911] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.007160] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f238b8ec-7764-4999-8c67-3761604eb903 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.021486] env[62109]: DEBUG nova.network.neutron [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.021961] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] Expecting reply to msg 5848f4665ecd48a6be085ff6596f4146 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 946.027762] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 946.027762] env[62109]: value = "task-401551" [ 946.027762] env[62109]: _type = "Task" [ 946.027762] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.031287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5848f4665ecd48a6be085ff6596f4146 [ 946.036185] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401551, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.150950] env[62109]: DEBUG nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.153655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 3bebd5d8aab54e09aa83893a22ddaac3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 946.156889] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. [ 946.156889] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 946.156889] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 946.156889] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 946.156889] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 946.156889] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 946.156889] env[62109]: ERROR nova.compute.manager raise self.value [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 946.156889] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 946.156889] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 946.156889] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 946.157468] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 946.157468] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 946.157468] 
env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. [ 946.157468] env[62109]: ERROR nova.compute.manager [ 946.157468] env[62109]: Traceback (most recent call last): [ 946.157468] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 946.157468] env[62109]: listener.cb(fileno) [ 946.157468] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 946.157468] env[62109]: result = function(*args, **kwargs) [ 946.157468] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 946.157468] env[62109]: return func(*args, **kwargs) [ 946.157468] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 946.157468] env[62109]: raise e [ 946.157468] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 946.157468] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 946.157468] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 946.157468] env[62109]: created_port_ids = self._update_ports_for_instance( [ 946.157468] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 946.157468] env[62109]: with excutils.save_and_reraise_exception(): [ 946.157468] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 946.157468] env[62109]: self.force_reraise() [ 946.157468] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 946.157468] env[62109]: raise self.value [ 946.157468] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 946.157468] env[62109]: updated_port = self._update_port( [ 946.157468] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 946.157468] env[62109]: _ensure_no_port_binding_failure(port) [ 946.157468] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 946.157468] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 946.158453] env[62109]: nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. [ 946.158453] env[62109]: Removing descriptor: 19 [ 946.164632] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bebd5d8aab54e09aa83893a22ddaac3 [ 946.298519] env[62109]: INFO nova.compute.manager [-] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Took 1.02 seconds to deallocate network for instance. 
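
Both PortBindingFailed tracebacks in this run pass through oslo_utils.excutils.save_and_reraise_exception() before force_reraise() re-raises the original error. A small usage sketch of that context manager (assuming oslo.utils is installed); the rollback body here is a stand-in, the point is only that cleanup runs and the original exception still propagates, which is how PortBindingFailed survives the port rollback in the log.

    # Usage sketch for oslo_utils.excutils.save_and_reraise_exception, the
    # context manager visible in both tracebacks. The rollback is a stand-in.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        pass


    def update_ports_or_rollback(update_port, rollback):
        try:
            return update_port()
        except Exception:
            # Run cleanup, then re-raise the *original* exception on exit.
            with excutils.save_and_reraise_exception():
                rollback()


    def failing_update():
        raise PortBindingFailed(
            "Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b")


    try:
        update_ports_or_rollback(failing_update, lambda: print("rolled back"))
    except PortBindingFailed as exc:
        print("original exception preserved:", exc)
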
[ 946.300853] env[62109]: DEBUG nova.compute.claims [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 946.301035] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.428061] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 946.455626] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 946.455626] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 946.455626] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.455863] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 946.455863] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.455863] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 
tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 946.455863] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 946.455863] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 946.456059] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 946.456059] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 946.456059] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 946.456059] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75ef4a0-abb2-4035-bd3e-f89f9b271f5e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.463392] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62ce917-f6df-4cea-853d-e97e7230e28a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.476853] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. 
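
Right before each failed spawn, nova.virt.hardware walks the flavor and image CPU constraints: with vcpus=1 and no limits or preferences (all 0:0:0, maxima 65536), the only possible topology is sockets=1, cores=1, threads=1, which is exactly what the log reports. A toy enumeration under those assumptions, mirroring the "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" records; this is not Nova's _get_possible_cpu_topologies.

    # Toy enumeration of (sockets, cores, threads) factorisations of a vCPU
    # count, for illustration only.
    from itertools import product


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies


    print(possible_topologies(1))  # [(1, 1, 1)] -> the single topology in the log
    print(possible_topologies(4))  # several candidates, e.g. (2, 2, 1), (4, 1, 1)
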
[ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Traceback (most recent call last): [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] yield resources [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.driver.spawn(context, instance, image_meta, [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] vm_ref = self.build_virtual_machine(instance, [ 946.476853] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] vif_infos = vmwarevif.get_vif_info(self._session, [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] for vif in network_info: [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return self._sync_wrapper(fn, *args, **kwargs) [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.wait() [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self[:] = self._gt.wait() [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return self._exit_event.wait() [ 946.477229] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 946.477229] env[62109]: ERROR 
nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] current.throw(*self._exc) [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] result = function(*args, **kwargs) [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return func(*args, **kwargs) [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise e [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] nwinfo = self.network_api.allocate_for_instance( [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] created_port_ids = self._update_ports_for_instance( [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] with excutils.save_and_reraise_exception(): [ 946.477577] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.force_reraise() [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise self.value [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] updated_port = self._update_port( [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] _ensure_no_port_binding_failure(port) [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise exception.PortBindingFailed(port_id=port['id']) [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. [ 946.477919] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] [ 946.477919] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Terminating instance [ 946.479219] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.479375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.479534] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 946.479939] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 6e9706edc6a34b129f6053725b889f31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 946.486595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e9706edc6a34b129f6053725b889f31 [ 946.524047] env[62109]: DEBUG oslo_concurrency.lockutils [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] Releasing lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.524467] env[62109]: DEBUG nova.compute.manager [req-54ce5f70-e358-44bd-9b39-679bd8ea9585 req-1493fd9e-4682-444c-8b4b-11dded89a532 service nova] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Received event network-vif-deleted-ff125b21-2e96-4688-8b92-5ff1f1891e83 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 946.538100] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401551, 'name': ReconfigVM_Task, 'duration_secs': 0.245905} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.538373] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3/0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.539007] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0582c963-73c3-4b51-b5fa-4fc2cdfb43b8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.545340] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 946.545340] env[62109]: value = "task-401552" [ 946.545340] env[62109]: _type = "Task" [ 946.545340] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.557347] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401552, 'name': Rename_Task} progress is 6%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.656124] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.656673] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 946.659048] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 0017917ef30043d7a2fd4c76b0e2305c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 946.659413] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.697s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.660836] env[62109]: INFO nova.compute.claims [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.662602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 490d215b99f544ffabba11c0a8c14384 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 946.693698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0017917ef30043d7a2fd4c76b0e2305c [ 946.697314] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 490d215b99f544ffabba11c0a8c14384 [ 946.998281] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 947.055713] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401552, 'name': Rename_Task, 'duration_secs': 0.132038} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.056067] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 947.056704] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c888a0f6-8a9b-40af-b254-8022bb238dd2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.062784] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 947.062784] env[62109]: value = "task-401553" [ 947.062784] env[62109]: _type = "Task" [ 947.062784] env[62109]: } to complete. 
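The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's wait_for_task pattern: the driver submits a vCenter task, then _poll_task logs "progress is N%" until the task reaches a terminal state and "completed successfully" is recorded. The sketch below is only an illustration of that poll-until-done loop, not oslo.vmware's actual implementation; FakeTask and the polling interval are stand-ins invented for the example.

```python
import time


# Stand-in for a vCenter task so the sketch runs on its own; in the log the
# state actually comes from the vCenter PropertyCollector via oslo.vmware.
class FakeTask:
    def __init__(self, progress_steps):
        self._steps = list(progress_steps)

    def poll(self):
        # Returns (state, progress); 'success' and 'error' are terminal.
        if self._steps:
            return "running", self._steps.pop(0)
        return "success", 100


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, mirroring the log's message shape:
    'Waiting for the task ... to complete.', periodic progress lines, then
    'completed successfully.'"""
    print("Waiting for the task to complete.")
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"progress is {progress}%.")
            time.sleep(interval)
            continue
        if state == "success":
            print("completed successfully.")
            return
        raise RuntimeError("task failed")


wait_for_task(FakeTask([0, 6, 42]), interval=0.01)
```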
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.070101] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.084194] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.084194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 09209c9da49b46c1a7b6031766950772 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.095990] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09209c9da49b46c1a7b6031766950772 [ 947.165500] env[62109]: DEBUG nova.compute.utils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.166616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 634895323fe04a7c9d85d36fa35a3eb9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.168811] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg e986a8435cf84b5dbdd8ad622982d24d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.169402] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 947.169580] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 947.179087] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 634895323fe04a7c9d85d36fa35a3eb9 [ 947.185603] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e986a8435cf84b5dbdd8ad622982d24d [ 947.211172] env[62109]: DEBUG nova.policy [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feae790f4343445c86cfb1b39cb9636e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e10657828d9480b948e59b98490572b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 947.433319] env[62109]: DEBUG nova.compute.manager [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Received event network-changed-c678b72f-ea04-4809-afb1-fe2f0b013d2b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 947.433629] env[62109]: DEBUG nova.compute.manager [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Refreshing instance network info cache due to event network-changed-c678b72f-ea04-4809-afb1-fe2f0b013d2b. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 947.433925] env[62109]: DEBUG oslo_concurrency.lockutils [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] Acquiring lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.483068] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Successfully created port: 977b3bb3-ccfc-4735-af64-154240ffc670 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.574233] env[62109]: DEBUG oslo_vmware.api [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401553, 'name': PowerOnVM_Task, 'duration_secs': 0.426838} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.574521] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 947.574726] env[62109]: DEBUG nova.compute.manager [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 947.575498] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ee198e-7113-4622-a20b-33924b9eff37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.583805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 504063bce4134c8da16160b4fec36a22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.585018] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.585453] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 947.585674] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 947.586252] env[62109]: DEBUG oslo_concurrency.lockutils [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] Acquired lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.586459] env[62109]: DEBUG nova.network.neutron [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Refreshing network info cache for port c678b72f-ea04-4809-afb1-fe2f0b013d2b {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.586885] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] Expecting reply to msg fbabd240780e4ab9b7e97bbaf4ba1257 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.587649] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-975e07c2-1125-43fb-853c-83d491edcbcb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.595663] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cbdcb5-1d7b-424d-839b-6d4ec3224ec7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.606850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbabd240780e4ab9b7e97bbaf4ba1257 [ 947.618936] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a879b81d-fb5a-483b-9c2a-4a5c416c1caa could not be found. [ 947.619159] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 947.619338] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Took 0.03 seconds to destroy the instance on the hypervisor. [ 947.619571] env[62109]: DEBUG oslo.service.loopingcall [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.619789] env[62109]: DEBUG nova.compute.manager [-] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 947.619882] env[62109]: DEBUG nova.network.neutron [-] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 947.631454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 504063bce4134c8da16160b4fec36a22 [ 947.637423] env[62109]: DEBUG nova.network.neutron [-] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 947.637892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f16649805ce548249655db99f0b4a671 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.645147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f16649805ce548249655db99f0b4a671 [ 947.670635] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 947.672260] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 26f51edca3d2403f834be363d54f26da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.716666] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquiring lock "37af2c2e-9c4e-445d-b128-c4c9137e73ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.716860] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "37af2c2e-9c4e-445d-b128-c4c9137e73ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.741578] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26f51edca3d2403f834be363d54f26da [ 947.805728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg f17f0cc9388a43c2af2b37afd13e6b6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.827118] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f17f0cc9388a43c2af2b37afd13e6b6b [ 947.850750] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cf342d59-5a0c-4bfe-8682-aee48a53da4f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.858707] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13d765a-dd74-4be0-9933-f077ca372a54 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.902891] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ac0ec8-fb15-47c0-aa39-ed7ffebdf93f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.911251] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33159d65-5948-4cab-9126-5b21f5ef31ba {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.924571] env[62109]: DEBUG nova.compute.provider_tree [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.925086] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 41c92193b02e403e842de4551b60218a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 947.931761] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41c92193b02e403e842de4551b60218a [ 948.094608] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.106947] env[62109]: DEBUG nova.network.neutron [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance cache missing network info. 
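The recurring Acquiring/Acquired/Releasing lines around names like "refresh_cache-<instance uuid>" and "compute_resources" are emitted by oslo.concurrency's lockutils, which serializes code sections on a named semaphore. A minimal sketch of that usage, assuming oslo.concurrency is installed; the instance UUID and lock names are copied from the log, while the function bodies are placeholders.

```python
from oslo_concurrency import lockutils

instance_uuid = "a879b81d-fb5a-483b-9c2a-4a5c416c1caa"  # from the log above

# Context-manager form: corresponds to the Acquiring/Acquired/Releasing
# entries logged around the network info cache rebuild.
with lockutils.lock(f"refresh_cache-{instance_uuid}"):
    # ... rebuild the instance network info cache here ...
    pass


# Decorator form: corresponds to the 'Lock "compute_resources" acquired by
# "...instance_claim"' style entries.
@lockutils.synchronized("compute_resources")
def instance_claim():
    # ... claim resources while holding the shared lock ...
    pass


instance_claim()
```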
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 948.140205] env[62109]: DEBUG nova.network.neutron [-] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.140863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d232c989f51646038af9a92467ec36fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.148680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d232c989f51646038af9a92467ec36fc [ 948.181264] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 4599ea6312b3407bb2a219dad5d86baf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.182879] env[62109]: DEBUG nova.network.neutron [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.183303] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] Expecting reply to msg c0290fa8c5e14dfe9be4f8ea0a7e3a3b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.191266] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0290fa8c5e14dfe9be4f8ea0a7e3a3b [ 948.229468] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4599ea6312b3407bb2a219dad5d86baf [ 948.310893] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.311216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.311479] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.311796] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.312023] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.314218] env[62109]: INFO nova.compute.manager [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Terminating instance [ 948.316588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "refresh_cache-0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.316787] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquired lock "refresh_cache-0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.317030] env[62109]: DEBUG nova.network.neutron [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 948.317538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 6a15072af5ed4f8abe3679bd7a142b85 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.324426] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a15072af5ed4f8abe3679bd7a142b85 [ 948.370466] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
[ 948.370466] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 948.370466] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 948.370466] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 948.370466] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 948.370466] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 948.370466] env[62109]: ERROR nova.compute.manager raise self.value [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 948.370466] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 948.370466] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 948.370466] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 948.371041] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 948.371041] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 948.371041] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
[ 948.371041] env[62109]: ERROR nova.compute.manager [ 948.371041] env[62109]: Traceback (most recent call last): [ 948.371041] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 948.371041] env[62109]: listener.cb(fileno) [ 948.371041] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 948.371041] env[62109]: result = function(*args, **kwargs) [ 948.371041] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 948.371041] env[62109]: return func(*args, **kwargs) [ 948.371041] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 948.371041] env[62109]: raise e [ 948.371041] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 948.371041] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 948.371041] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 948.371041] env[62109]: created_port_ids = self._update_ports_for_instance( [ 948.371041] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 948.371041] env[62109]: with excutils.save_and_reraise_exception(): [ 948.371041] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 948.371041] env[62109]: self.force_reraise() [ 948.371041] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 948.371041] env[62109]: raise self.value [ 948.371041] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 948.371041] env[62109]: updated_port = self._update_port( [ 948.371041] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 948.371041] env[62109]: _ensure_no_port_binding_failure(port) [ 948.371041] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 948.371041] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 948.371966] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
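Both tracebacks in this section end the same way: Neutron hands back a port whose binding failed, _ensure_no_port_binding_failure raises PortBindingFailed, and the surrounding excutils.save_and_reraise_exception() block lets cleanup run before the original exception is re-raised. A condensed sketch of that control flow follows; it uses the real oslo_utils.excutils helper shown in the traceback but a locally defined PortBindingFailed stand-in instead of Nova's own exception class, and the cleanup body is illustrative only.

```python
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")


def ensure_no_port_binding_failure(port):
    # Mirrors the check in nova.network.neutron._ensure_no_port_binding_failure:
    # a port whose binding:vif_type is 'binding_failed' could not be bound
    # to the compute host.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


def update_port(port):
    try:
        ensure_no_port_binding_failure(port)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup/logging happens here; the captured exception is
            # re-raised when the context manager exits.
            print("port update failed, cleaning up")


try:
    update_port({"id": "977b3bb3-ccfc-4735-af64-154240ffc670",
                 "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(exc)
```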
[ 948.371966] env[62109]: Removing descriptor: 19 [ 948.427409] env[62109]: DEBUG nova.scheduler.client.report [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.429891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 0cd9ccf9234f421ab2533661bd246938 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.444893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd9ccf9234f421ab2533661bd246938 [ 948.643010] env[62109]: INFO nova.compute.manager [-] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Took 1.02 seconds to deallocate network for instance. [ 948.645324] env[62109]: DEBUG nova.compute.claims [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 948.645499] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.686249] env[62109]: DEBUG oslo_concurrency.lockutils [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] Releasing lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.686492] env[62109]: DEBUG nova.compute.manager [req-226133d7-b10d-4dc9-9b62-2995122a96a0 req-5eeda97f-f1d1-4a6d-8e2e-4dfd29e17d25 service nova] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Received event network-vif-deleted-c678b72f-ea04-4809-afb1-fe2f0b013d2b {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 948.687533] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 948.716188] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 948.716188] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 948.716188] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.716356] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 948.716356] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.716356] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 948.716356] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 948.716356] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 948.716525] env[62109]: DEBUG nova.virt.hardware [None 
req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 948.716525] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 948.716525] env[62109]: DEBUG nova.virt.hardware [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 948.716525] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98966309-515a-4ca4-b191-06dd424345dd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.723483] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c046284-5d37-4315-a190-53ed6d840115 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.737960] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
[ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Traceback (most recent call last): [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] yield resources [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.driver.spawn(context, instance, image_meta, [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] vm_ref = self.build_virtual_machine(instance, [ 948.737960] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] vif_infos = vmwarevif.get_vif_info(self._session, [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] for vif in network_info: [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return self._sync_wrapper(fn, *args, **kwargs) [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.wait() [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self[:] = self._gt.wait() [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return self._exit_event.wait() [ 948.738418] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 948.738418] env[62109]: ERROR 
nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] current.throw(*self._exc) [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] result = function(*args, **kwargs) [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return func(*args, **kwargs) [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise e [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] nwinfo = self.network_api.allocate_for_instance( [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] created_port_ids = self._update_ports_for_instance( [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] with excutils.save_and_reraise_exception(): [ 948.738884] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.force_reraise() [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise self.value [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] updated_port = self._update_port( [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] _ensure_no_port_binding_failure(port) [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise exception.PortBindingFailed(port_id=port['id']) [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. [ 948.739331] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] [ 948.739331] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Terminating instance [ 948.740157] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.740317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.740475] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 948.740871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 546769afadbc4af9b95356a5c0a889e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.747082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 546769afadbc4af9b95356a5c0a889e6 [ 948.834560] env[62109]: DEBUG nova.network.neutron [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance cache missing network info. 
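When triaging a compute log like this one, the useful correlation is port UUID to instance UUID: each failed boot logs "Binding failed for port <port>" inside a traceback prefixed with "[instance: <uuid>]". The helper below is a small grep-style convenience for pulling those pairs out of a log file; the n-cpu.log default path is a placeholder, not a path taken from this document.

```python
import re
import sys
from collections import defaultdict

# Matches '[instance: <uuid>] ... Binding failed for port <uuid>' as it
# appears in the tracebacks above (both UUIDs are on the same log record).
PATTERN = re.compile(
    r"\[instance: (?P<instance>[0-9a-f-]{36})\].*"
    r"Binding failed for port (?P<port>[0-9a-f-]{36})")


def failed_bindings(lines):
    """Map instance UUID -> set of port UUIDs that failed to bind."""
    failures = defaultdict(set)
    for line in lines:
        match = PATTERN.search(line)
        if match:
            failures[match.group("instance")].add(match.group("port"))
    return failures


if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "n-cpu.log"  # placeholder
    with open(path) as fh:
        for instance, ports in failed_bindings(fh).items():
            print(instance, "->", ", ".join(sorted(ports)))
```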
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 948.886734] env[62109]: DEBUG nova.network.neutron [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.887244] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 01a7a7a5eb0a4ea8b01238b555273442 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.894756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01a7a7a5eb0a4ea8b01238b555273442 [ 948.932364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.932880] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 948.935003] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg e9a7a6a2f4464e73940008478f86aa14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.936038] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.411s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.937753] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 26cf54e385e3422ca1a537e76770ac2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 948.979931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9a7a6a2f4464e73940008478f86aa14 [ 948.980537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26cf54e385e3422ca1a537e76770ac2d [ 949.257895] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.334459] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.334962] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 2c72104f02774779ade48b8fda6e5040 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.343234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c72104f02774779ade48b8fda6e5040 [ 949.390005] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Releasing lock "refresh_cache-0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.390450] env[62109]: DEBUG nova.compute.manager [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 949.390643] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 949.391512] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a9dd93-ff0c-4418-b5d2-04bab4bd9e89 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.399476] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 949.399702] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-808cc907-ced7-48c5-899e-c76a5a49c4e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.406434] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 949.406434] env[62109]: value = "task-401554" [ 949.406434] env[62109]: _type = "Task" [ 949.406434] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.414529] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401554, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.441549] env[62109]: DEBUG nova.compute.utils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.442375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 92a75e9bf22d495b90ea6a22ab3e46a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.459731] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92a75e9bf22d495b90ea6a22ab3e46a2 [ 949.460662] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 949.462529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 20d6519314914903b0f015c748228fcb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.463951] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 949.464084] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 949.468253] env[62109]: DEBUG nova.compute.manager [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Received event network-changed-977b3bb3-ccfc-4735-af64-154240ffc670 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 949.468437] env[62109]: DEBUG nova.compute.manager [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Refreshing instance network info cache due to event network-changed-977b3bb3-ccfc-4735-af64-154240ffc670. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 949.468623] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] Acquiring lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.506792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20d6519314914903b0f015c748228fcb [ 949.517340] env[62109]: DEBUG nova.policy [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2883d8b8ab764050a13c8b3a56318c34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98934316e57a4ea69b2bb5a2f2aaf251', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 949.635036] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4150b749-8a23-4502-a8aa-494f96aa1c30 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.642667] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14a4b96-7f49-488f-a5cf-3ca55d2b3fb7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.673717] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd35ee7e-558d-453e-ae3d-1bb890231c92 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.681155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345200df-72c8-41bc-a7f5-702bfdeea994 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.694201] env[62109]: DEBUG nova.compute.provider_tree [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.694716] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 2c4e06ef846241a5aa59fe0e10fee74e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.701874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c4e06ef846241a5aa59fe0e10fee74e [ 949.823757] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Successfully created port: 73a1a0e1-a2b2-4bfd-af75-9650be5f9837 {{(pid=62109) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 949.836929] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.837358] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 949.837559] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 949.837870] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] Acquired lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.838101] env[62109]: DEBUG nova.network.neutron [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Refreshing network info cache for port 977b3bb3-ccfc-4735-af64-154240ffc670 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 949.838563] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] Expecting reply to msg 7c40342dce5c451eb818216c8285366d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.840369] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c729d701-376a-46c2-94c7-5db7fd58c365 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.847051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c40342dce5c451eb818216c8285366d [ 949.850140] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ac28cf-e8d1-4cd5-99d8-a0df26dc0372 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.873143] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ab3a71f-7e26-4d29-b006-6dbebcee16e1 could not be found. 
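The WARNING just above shows the teardown path tolerating a VM that is already gone from the backend: the driver logs "Instance does not exist on backend" and then proceeds as if the destroy succeeded, so the compute-level cleanup (network deallocation, claim abort) can still run. The following is a minimal, self-contained sketch of that "treat InstanceNotFound as already destroyed" pattern; the InstanceNotFound class, destroy_instance helper, and backend_delete callable are hypothetical stand-ins for illustration, not Nova's actual vmops code.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("destroy-sketch")


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound (illustrative only)."""


def destroy_instance(instance_uuid, backend_delete):
    """Tear down a VM, treating 'already gone on the backend' as success.

    backend_delete is any callable that removes the VM on the hypervisor and
    raises InstanceNotFound if it is not there (hypothetical interface).
    """
    try:
        backend_delete(instance_uuid)
    except InstanceNotFound as exc:
        # Mirrors the WARNING in the log: nothing to remove on the backend,
        # so continue with the rest of the delete instead of failing it.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("[instance: %s] Instance destroyed", instance_uuid)


def _missing_backend(uuid):
    # Simulates a hypervisor that no longer knows about the VM.
    raise InstanceNotFound(f"Instance {uuid} could not be found.")


destroy_instance("9ab3a71f-7e26-4d29-b006-6dbebcee16e1", _missing_backend)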
[ 949.873356] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 949.873534] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 949.873772] env[62109]: DEBUG oslo.service.loopingcall [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.873992] env[62109]: DEBUG nova.compute.manager [-] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 949.874091] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 949.890653] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.891087] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5190d24204484996b0282a140c302091 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 949.897560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5190d24204484996b0282a140c302091 [ 949.915144] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401554, 'name': PowerOffVM_Task, 'duration_secs': 0.18326} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.915365] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 949.915531] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 949.915766] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef702abd-92d4-44ad-a34c-57dc2f65e1e0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.939764] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 949.940264] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 949.940517] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Deleting the datastore file [datastore1] 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.940777] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-966806a4-365a-40dc-9b00-b7328aa90dc3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.946812] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for the task: (returnval){ [ 949.946812] env[62109]: value = "task-401556" [ 949.946812] env[62109]: _type = "Task" [ 949.946812] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.955082] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.972225] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 0c368bc57e484617b5c47b238546568a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.005008] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c368bc57e484617b5c47b238546568a [ 950.197486] env[62109]: DEBUG nova.scheduler.client.report [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.200096] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 14209aa2695d43e2a45b36763e3c3a22 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.211648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14209aa2695d43e2a45b36763e3c3a22 [ 950.356841] env[62109]: DEBUG nova.network.neutron [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 950.393088] env[62109]: DEBUG nova.network.neutron [-] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.393616] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg eaf1b787354d4d2396695b3e3656f7a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.402201] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaf1b787354d4d2396695b3e3656f7a1 [ 950.445637] env[62109]: DEBUG nova.network.neutron [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.446146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] Expecting reply to msg 214f09605232414e94dad355967dc9e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.454460] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 214f09605232414e94dad355967dc9e6 [ 950.458243] env[62109]: DEBUG oslo_vmware.api [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Task: {'id': task-401556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093244} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.458243] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.458373] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 950.458541] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 950.458704] env[62109]: INFO nova.compute.manager [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Took 1.07 seconds to destroy the instance on the hypervisor. [ 950.458928] env[62109]: DEBUG oslo.service.loopingcall [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.459126] env[62109]: DEBUG nova.compute.manager [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 950.459223] env[62109]: DEBUG nova.network.neutron [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 950.472481] env[62109]: DEBUG nova.network.neutron [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 950.472915] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 79563d22915f47f4846d243103bc316b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.478997] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 950.478997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79563d22915f47f4846d243103bc316b [ 950.504776] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 950.505020] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 950.505169] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.505347] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 950.505490] env[62109]: DEBUG nova.virt.hardware [None 
req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.505631] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 950.505830] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 950.505985] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 950.506142] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 950.506299] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 950.506461] env[62109]: DEBUG nova.virt.hardware [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 950.507581] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b529ea0-f475-437a-b13f-a2aec58ec98a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.515353] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecaf9f7-6a14-4eb8-ab53-2428daf6eff0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.673224] env[62109]: ERROR nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. 
[ 950.673224] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 950.673224] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 950.673224] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 950.673224] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 950.673224] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 950.673224] env[62109]: ERROR nova.compute.manager raise self.value [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 950.673224] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 950.673224] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 950.673224] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 950.673735] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 950.673735] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 950.673735] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. 
[ 950.673735] env[62109]: ERROR nova.compute.manager [ 950.673735] env[62109]: Traceback (most recent call last): [ 950.673735] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 950.673735] env[62109]: listener.cb(fileno) [ 950.673735] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 950.673735] env[62109]: result = function(*args, **kwargs) [ 950.673735] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 950.673735] env[62109]: return func(*args, **kwargs) [ 950.673735] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 950.673735] env[62109]: raise e [ 950.673735] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 950.673735] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 950.673735] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 950.673735] env[62109]: created_port_ids = self._update_ports_for_instance( [ 950.673735] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 950.673735] env[62109]: with excutils.save_and_reraise_exception(): [ 950.673735] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 950.673735] env[62109]: self.force_reraise() [ 950.673735] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 950.673735] env[62109]: raise self.value [ 950.673735] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 950.673735] env[62109]: updated_port = self._update_port( [ 950.673735] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 950.673735] env[62109]: _ensure_no_port_binding_failure(port) [ 950.673735] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 950.673735] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 950.674607] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. [ 950.674607] env[62109]: Removing descriptor: 19 [ 950.674607] env[62109]: ERROR nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. 
[ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Traceback (most recent call last): [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] yield resources [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.driver.spawn(context, instance, image_meta, [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 950.674607] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] vm_ref = self.build_virtual_machine(instance, [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] for vif in network_info: [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self._sync_wrapper(fn, *args, **kwargs) [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.wait() [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self[:] = self._gt.wait() [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self._exit_event.wait() [ 950.674983] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 950.675396] env[62109]: ERROR 
nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] result = hub.switch() [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self.greenlet.switch() [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] result = function(*args, **kwargs) [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return func(*args, **kwargs) [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise e [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] nwinfo = self.network_api.allocate_for_instance( [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 950.675396] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] created_port_ids = self._update_ports_for_instance( [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] with excutils.save_and_reraise_exception(): [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.force_reraise() [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise self.value [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] updated_port = self._update_port( [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 950.675813] 
env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] _ensure_no_port_binding_failure(port) [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 950.675813] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise exception.PortBindingFailed(port_id=port['id']) [ 950.676217] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. [ 950.676217] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] [ 950.676217] env[62109]: INFO nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Terminating instance [ 950.676548] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.676704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.676861] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.677272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg ef8ac71a7a2b4c338d376cdd82f3c968 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.684410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef8ac71a7a2b4c338d376cdd82f3c968 [ 950.702637] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.767s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.703211] env[62109]: ERROR nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. 
[ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Traceback (most recent call last): [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.driver.spawn(context, instance, image_meta, [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] vm_ref = self.build_virtual_machine(instance, [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] vif_infos = vmwarevif.get_vif_info(self._session, [ 950.703211] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] for vif in network_info: [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return self._sync_wrapper(fn, *args, **kwargs) [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.wait() [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self[:] = self._gt.wait() [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return self._exit_event.wait() [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] current.throw(*self._exc) [ 950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
950.703824] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] result = function(*args, **kwargs) [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] return func(*args, **kwargs) [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise e [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] nwinfo = self.network_api.allocate_for_instance( [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] created_port_ids = self._update_ports_for_instance( [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] with excutils.save_and_reraise_exception(): [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] self.force_reraise() [ 950.704630] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise self.value [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] updated_port = self._update_port( [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] _ensure_no_port_binding_failure(port) [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] raise exception.PortBindingFailed(port_id=port['id']) [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] nova.exception.PortBindingFailed: Binding failed for 
port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. [ 950.705221] env[62109]: ERROR nova.compute.manager [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] [ 950.705221] env[62109]: DEBUG nova.compute.utils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 950.705677] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.416s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.706350] env[62109]: INFO nova.compute.claims [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.707852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg dbbc97a11a9a4c99987134fc1fba8519 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.709393] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Build of instance 5f58014c-e132-4fad-9ba7-bc183318200f was re-scheduled: Binding failed for port 66844096-b3f6-4317-9dfc-3d1d1cd65bd5, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 950.709790] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 950.710005] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquiring lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.710150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Acquired lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.710352] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.710820] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 4b9311c7011a495f9428c9c8ab4d146b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.722360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b9311c7011a495f9428c9c8ab4d146b [ 950.745217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbbc97a11a9a4c99987134fc1fba8519 [ 950.895808] env[62109]: INFO nova.compute.manager [-] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Took 1.02 seconds to deallocate network for instance. 
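The tracebacks and the re-schedule message above all funnel through the same check: after updating a port, _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']) when Neutron reports the binding as failed, which aborts the claim and re-schedules the build. Below is a self-contained sketch of that check. Only the raised PortBindingFailed(port_id=...) and its message are taken from the log; the 'binding:vif_type' == 'binding_failed' test and the exception class defined here are assumptions for illustration.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information."
        )
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if the Neutron port dict indicates a failed binding (sketch)."""
    # Assumed check: a failed binding surfaces as vif_type 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# Example: a port whose binding Neutron could not complete.
bad_port = {
    "id": "73a1a0e1-a2b2-4bfd-af75-9650be5f9837",
    "binding:vif_type": "binding_failed",
}
try:
    ensure_no_port_binding_failure(bad_port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the ERROR summary lines in the log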
[ 950.898407] env[62109]: DEBUG nova.compute.claims [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 950.898588] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.948346] env[62109]: DEBUG oslo_concurrency.lockutils [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] Releasing lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.948648] env[62109]: DEBUG nova.compute.manager [req-0a2a7c3d-b850-4182-8732-734ecfbfdd20 req-ce55da29-638a-4821-a06d-4e2d2a240407 service nova] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Received event network-vif-deleted-977b3bb3-ccfc-4735-af64-154240ffc670 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 950.975184] env[62109]: DEBUG nova.network.neutron [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.975609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2c28e9117ef2482c91764af4e7090383 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 950.984315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c28e9117ef2482c91764af4e7090383 [ 951.197552] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.215143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 87ce0008376b4963b2a1f790ba274dc5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.222518] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87ce0008376b4963b2a1f790ba274dc5 [ 951.233124] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.278071] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.278593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 3f416f22aa02430b9bee4e744117f8be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.287983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f416f22aa02430b9bee4e744117f8be [ 951.314073] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.314582] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg a8b09d4aa91f4a17b9efc6884b07be9c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.322430] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8b09d4aa91f4a17b9efc6884b07be9c [ 951.477569] env[62109]: INFO nova.compute.manager [-] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Took 1.02 seconds to deallocate network for instance. [ 951.481658] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 3be08dcbfe3c4e65b11141697f3a2842 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.495656] env[62109]: DEBUG nova.compute.manager [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Received event network-changed-73a1a0e1-a2b2-4bfd-af75-9650be5f9837 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 951.495818] env[62109]: DEBUG nova.compute.manager [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Refreshing instance network info cache due to event network-changed-73a1a0e1-a2b2-4bfd-af75-9650be5f9837. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 951.496113] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] Acquiring lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.508863] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3be08dcbfe3c4e65b11141697f3a2842 [ 951.781162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.781742] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 951.782074] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 951.782476] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] Acquired lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.782776] env[62109]: DEBUG nova.network.neutron [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Refreshing network info cache for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 951.783297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] Expecting reply to msg 362f1db6befb47a1a6a657a3e0750323 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.784640] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-705930f2-9f05-47a0-afdd-43989b5c6784 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.791172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 362f1db6befb47a1a6a657a3e0750323 [ 951.795744] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c797cd-133f-4025-b559-cc794f1d8b99 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.816563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Releasing lock "refresh_cache-5f58014c-e132-4fad-9ba7-bc183318200f" 
{{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.816906] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 951.817228] env[62109]: DEBUG nova.compute.manager [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 951.817542] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 951.824772] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b04cc451-a497-474f-90dd-282a469ff3c2 could not be found. [ 951.825117] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 951.825436] env[62109]: INFO nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 951.825807] env[62109]: DEBUG oslo.service.loopingcall [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.826554] env[62109]: DEBUG nova.compute.manager [-] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 951.826874] env[62109]: DEBUG nova.network.neutron [-] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 951.838721] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.839368] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 5e13a3bf7db94a31bf87eefce53ba9ab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.840935] env[62109]: DEBUG nova.network.neutron [-] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.841555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2cdc75e94164da3956aa618398b664e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.847143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e13a3bf7db94a31bf87eefce53ba9ab [ 951.848509] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2cdc75e94164da3956aa618398b664e [ 951.875528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da7a708-58df-40dd-bec3-9e85f6b38b4b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.884465] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919c3869-7111-4944-9087-ad15edeb1f9c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.913463] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2da6a23-9b3d-47b9-8673-32341c167202 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.920175] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d44c1f2-188c-40ea-890c-239efebb1880 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.932561] env[62109]: DEBUG nova.compute.provider_tree [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.933173] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg da3931c9c8454ccb944eb1c2e7ebae6b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 951.939733] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da3931c9c8454ccb944eb1c2e7ebae6b [ 951.984420] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.303651] env[62109]: DEBUG nova.network.neutron [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: 
b04cc451-a497-474f-90dd-282a469ff3c2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.343971] env[62109]: DEBUG nova.network.neutron [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.344482] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 824103deac6c4a7f8ad9d93b30355374 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.345353] env[62109]: DEBUG nova.network.neutron [-] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.345584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bcbc3b2b0e754c18ab118dee02225b14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.352504] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 824103deac6c4a7f8ad9d93b30355374 [ 952.353152] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcbc3b2b0e754c18ab118dee02225b14 [ 952.374528] env[62109]: DEBUG nova.network.neutron [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.375018] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] Expecting reply to msg 910cd7888d2c4c7cb91b092536535e84 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.381983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910cd7888d2c4c7cb91b092536535e84 [ 952.436336] env[62109]: DEBUG nova.scheduler.client.report [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.438783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 26e1be9c4fc3496ab7e3e636854cf694 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.450867] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26e1be9c4fc3496ab7e3e636854cf694 [ 952.847792] env[62109]: INFO nova.compute.manager 
[None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] [instance: 5f58014c-e132-4fad-9ba7-bc183318200f] Took 1.03 seconds to deallocate network for instance. [ 952.849705] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 585ad9ba0774452e9bbf453cff706502 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.850821] env[62109]: INFO nova.compute.manager [-] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Took 1.02 seconds to deallocate network for instance. [ 952.852952] env[62109]: DEBUG nova.compute.claims [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 952.853195] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.876960] env[62109]: DEBUG oslo_concurrency.lockutils [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] Releasing lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.877411] env[62109]: DEBUG nova.compute.manager [req-ec0baf33-4ec3-4779-b9d7-66689789ca7a req-f4383d13-8514-43b0-a7e5-e5a7d0458102 service nova] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Received event network-vif-deleted-73a1a0e1-a2b2-4bfd-af75-9650be5f9837 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 952.879872] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 585ad9ba0774452e9bbf453cff706502 [ 952.941485] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.942091] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 952.943775] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg ae0ad22c413e41189e5054f6876cbaf8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.944885] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.465s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.946408] env[62109]: INFO nova.compute.claims [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.948128] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 278cd03d545041f381d752a28c858fae in queue reply_7522b64acfeb4981b1f36928b040d568 [ 952.972186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae0ad22c413e41189e5054f6876cbaf8 [ 952.979939] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 278cd03d545041f381d752a28c858fae [ 953.354356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 270d847ad44546ab83b7a8dd43304b20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 953.383187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 270d847ad44546ab83b7a8dd43304b20 [ 953.451999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg c534c55147b44ea9b55c8b7bb05e6ce5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 953.453592] env[62109]: DEBUG nova.compute.utils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.454143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 56a1df16c5a34af2b070f0724ea32a08 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 953.455373] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 953.455529] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 953.460096] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c534c55147b44ea9b55c8b7bb05e6ce5 [ 953.463611] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a1df16c5a34af2b070f0724ea32a08 [ 953.503372] env[62109]: DEBUG nova.policy [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6349e1aff7d945a6a471b1f4e826b23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a866168186462d9d849072a1ff25f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 953.767719] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Successfully created port: 78639c1d-5491-49ff-8e1b-f3202998b190 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.874659] env[62109]: INFO nova.scheduler.client.report [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Deleted allocations for instance 5f58014c-e132-4fad-9ba7-bc183318200f [ 953.880661] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Expecting reply to msg 12f71f45ed284391a1527d55ac4e2025 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 953.893526] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12f71f45ed284391a1527d55ac4e2025 [ 953.959201] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.960852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg b17f20d7091f4367b804d0b64f347eea in queue reply_7522b64acfeb4981b1f36928b040d568 [ 953.997669] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b17f20d7091f4367b804d0b64f347eea [ 954.082496] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911e1488-b27c-412e-9ce2-87de5dffddbc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.090512] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf0df96-35e2-4d30-978b-bed9c538fa9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.119681] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f82f69-df93-484c-a38b-6e192c7808ca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.129809] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddf260d-d7a6-4834-9a22-c2deec569305 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.142838] env[62109]: DEBUG nova.compute.provider_tree [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.143341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 188ea56e0c734c35bab06ddc28bf8806 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.150306] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 188ea56e0c734c35bab06ddc28bf8806 [ 954.382987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8c2f415b-a434-4212-a156-3c4cb8cd605b tempest-ServerRescueTestJSON-604246021 tempest-ServerRescueTestJSON-604246021-project-member] Lock "5f58014c-e132-4fad-9ba7-bc183318200f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.134s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.383614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg bbe8e6eb64aa40eaa4bc3825b8217ac5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.396533] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbe8e6eb64aa40eaa4bc3825b8217ac5 [ 954.446114] env[62109]: DEBUG nova.compute.manager [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] [instance: 
d622ca94-7f5c-47f4-8077-ff37f64eea02] Received event network-changed-78639c1d-5491-49ff-8e1b-f3202998b190 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 954.446318] env[62109]: DEBUG nova.compute.manager [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Refreshing instance network info cache due to event network-changed-78639c1d-5491-49ff-8e1b-f3202998b190. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 954.446535] env[62109]: DEBUG oslo_concurrency.lockutils [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] Acquiring lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.446676] env[62109]: DEBUG oslo_concurrency.lockutils [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] Acquired lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.446831] env[62109]: DEBUG nova.network.neutron [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Refreshing network info cache for port 78639c1d-5491-49ff-8e1b-f3202998b190 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 954.447257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] Expecting reply to msg 5c51074a661347e1b0fde197e4164e95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.455340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c51074a661347e1b0fde197e4164e95 [ 954.466894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5cc72c035d6842ce965fdd168c887311 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.507071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cc72c035d6842ce965fdd168c887311 [ 954.646647] env[62109]: DEBUG nova.scheduler.client.report [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.649157] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 1af198df09a14ab197b8b88add5199e5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.660536] env[62109]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1af198df09a14ab197b8b88add5199e5 [ 954.815306] env[62109]: ERROR nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. [ 954.815306] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.815306] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 954.815306] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 954.815306] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.815306] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.815306] env[62109]: ERROR nova.compute.manager raise self.value [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 954.815306] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 954.815306] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.815306] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 954.816050] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.816050] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 954.816050] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. 
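The frames above end in _ensure_no_port_binding_failure() raising PortBindingFailed for port 78639c1d-5491-49ff-8e1b-f3202998b190; the greenthread traceback below re-raises the same failure. A minimal sketch of a guard of that shape, assuming the Neutron port dict carries the standard binding:vif_type attribute and that the value binding_failed marks a failed binding (the exception class is a stand-in for nova.exception.PortBindingFailed):

    # Editorial sketch, not the Nova source: raise once Neutron reports that
    # binding the port failed, mirroring the message seen in this log.
    VIF_TYPE_BINDING_FAILED = "binding_failed"   # Neutron marker for a failed binding

    class PortBindingFailed(Exception):          # stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron exposes the binding outcome on the port as 'binding:vif_type'.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    # Example matching the log entry above:
    port = {"id": "78639c1d-5491-49ff-8e1b-f3202998b190",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)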
[ 954.816050] env[62109]: ERROR nova.compute.manager [ 954.816050] env[62109]: Traceback (most recent call last): [ 954.816050] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 954.816050] env[62109]: listener.cb(fileno) [ 954.816050] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 954.816050] env[62109]: result = function(*args, **kwargs) [ 954.816050] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 954.816050] env[62109]: return func(*args, **kwargs) [ 954.816050] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 954.816050] env[62109]: raise e [ 954.816050] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.816050] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 954.816050] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 954.816050] env[62109]: created_port_ids = self._update_ports_for_instance( [ 954.816050] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 954.816050] env[62109]: with excutils.save_and_reraise_exception(): [ 954.816050] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.816050] env[62109]: self.force_reraise() [ 954.816050] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.816050] env[62109]: raise self.value [ 954.816050] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 954.816050] env[62109]: updated_port = self._update_port( [ 954.816050] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.816050] env[62109]: _ensure_no_port_binding_failure(port) [ 954.816050] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.816050] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 954.817334] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. [ 954.817334] env[62109]: Removing descriptor: 19 [ 954.887213] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 954.889146] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 1198664f23a745eda1d828a27747950d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 954.922145] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1198664f23a745eda1d828a27747950d [ 954.964984] env[62109]: DEBUG nova.network.neutron [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.970788] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.994814] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.995075] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.995229] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.995413] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.995553] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.995692] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.995890] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 954.996111] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.996276] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.996440] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.996611] env[62109]: DEBUG nova.virt.hardware [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.997451] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502c20cd-d920-49e5-a8b5-47d8b3f5d84f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.005319] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15f68e7-c688-489c-9951-aba08d977c4a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.019566] env[62109]: ERROR nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. 
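The nova.virt.hardware entries above reduce the m1.nano flavor's single vCPU to exactly one candidate topology, cores=1/sockets=1/threads=1, within the 65536/65536/65536 maxima. A simplified, hypothetical sketch of that enumeration step (the real hardware.py logic additionally weighs flavor and image preferences; possible_topologies is invented for illustration), before the spawn failure whose traceback follows below:

    # Editorial sketch: enumerate (sockets, cores, threads) combinations whose
    # product equals the vCPU count and which respect the given maxima.
    from itertools import product
    from typing import NamedTuple

    class VirtCPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # No dimension can exceed the vCPU count, so cap the search there.
        return [VirtCPUTopology(s, c, t)
                for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                                       range(1, min(max_cores, vcpus) + 1),
                                       range(1, min(max_threads, vcpus) + 1))
                if s * c * t == vcpus]

    # 1 vCPU under 65536/65536/65536 limits -> the single topology 1:1:1,
    # matching "Got 1 possible topologies" above.
    print(possible_topologies(1, 65536, 65536, 65536))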
[ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Traceback (most recent call last): [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] yield resources [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.driver.spawn(context, instance, image_meta, [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] vm_ref = self.build_virtual_machine(instance, [ 955.019566] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] vif_infos = vmwarevif.get_vif_info(self._session, [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] for vif in network_info: [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return self._sync_wrapper(fn, *args, **kwargs) [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.wait() [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self[:] = self._gt.wait() [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return self._exit_event.wait() [ 955.019937] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 955.019937] env[62109]: ERROR 
nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] current.throw(*self._exc) [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] result = function(*args, **kwargs) [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return func(*args, **kwargs) [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise e [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] nwinfo = self.network_api.allocate_for_instance( [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] created_port_ids = self._update_ports_for_instance( [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] with excutils.save_and_reraise_exception(): [ 955.020496] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.force_reraise() [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise self.value [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] updated_port = self._update_port( [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] _ensure_no_port_binding_failure(port) [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise exception.PortBindingFailed(port_id=port['id']) [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. [ 955.020854] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] [ 955.020854] env[62109]: INFO nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Terminating instance [ 955.024182] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.051638] env[62109]: DEBUG nova.network.neutron [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.052161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] Expecting reply to msg 0e48b1f9b2bd46d4943286ea956923d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.062298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e48b1f9b2bd46d4943286ea956923d0 [ 955.151767] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.152302] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.154070] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 40e4b8a725fb44a59a2515947997455c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.155049] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.136s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.165807] env[62109]: INFO nova.compute.claims [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.167345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ccdcf638ca264746b717b142c5f2ebd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.196258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40e4b8a725fb44a59a2515947997455c [ 955.204558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccdcf638ca264746b717b142c5f2ebd2 [ 955.411444] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.554760] env[62109]: DEBUG oslo_concurrency.lockutils [req-b470495f-1274-472a-bc4b-93e12ae3c6cc req-53dcff60-3458-407c-bf54-8c12bdc94b9b service nova] Releasing lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.555197] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.555387] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 955.555838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg ee43de9ac2b44a7bbf9a4ac6bfe0e61f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.563313] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee43de9ac2b44a7bbf9a4ac6bfe0e61f [ 955.670738] env[62109]: DEBUG nova.compute.utils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.671396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 3ef6c1978d6143d9a43a2232d126e4da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.676800] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ea95dcc7ae0d47a7b8a8a8347b6b443e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 955.676800] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 955.676800] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 955.681505] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef6c1978d6143d9a43a2232d126e4da [ 955.684498] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea95dcc7ae0d47a7b8a8a8347b6b443e [ 955.722058] env[62109]: DEBUG nova.policy [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24290d4ecad54e0083310fcd3c199cb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '760ebbb02a9c44abb44ea37f93694ada', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 956.017244] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Successfully created port: bc6eae64-58f5-45f8-84e4-a333eeaf85e1 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.073681] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 956.159087] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.159814] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 96b7091e2cfc4695856ff4cd00caa8f8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.172688] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96b7091e2cfc4695856ff4cd00caa8f8 [ 956.174684] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.177274] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg cf7c116182a8438695e3da6bc6349954 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.213847] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf7c116182a8438695e3da6bc6349954 [ 956.347629] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d1790e-78f1-4683-b7ba-81bac292d945 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.359519] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2232166c-d60b-4733-ad4d-f698bf6bf6e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.396416] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59425a97-e4d0-45a2-9f2b-9e7d23cc5447 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.405074] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51aa633c-3df6-4cf3-806e-b5a94e56c92d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.419920] env[62109]: DEBUG nova.compute.provider_tree [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.420524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 94bb19588c9f4803b803d7404084fa1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.427940] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94bb19588c9f4803b803d7404084fa1b [ 956.510903] env[62109]: DEBUG nova.compute.manager [req-cecfd4a8-c418-4d12-a9b0-d9ea70283309 req-02350db2-8cf5-4cf0-8c96-0f95b6545353 service nova] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Received event network-vif-deleted-78639c1d-5491-49ff-8e1b-f3202998b190 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 956.673578] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.673986] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 956.674174] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 956.674475] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c42d26f3-cc41-4da8-a0f3-10b6c1878739 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.684628] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96c5995-d24f-488d-98c1-498632b1b04a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.697670] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg b4f391172636432d8aae815207ca221a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.711200] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d622ca94-7f5c-47f4-8077-ff37f64eea02 could not be found. [ 956.711412] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 956.711582] env[62109]: INFO nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Took 0.04 seconds to destroy the instance on the hypervisor. 
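The traceback for instance d622ca94-7f5c-47f4-8077-ff37f64eea02 above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the trace), which raises exception.PortBindingFailed after Neutron has accepted the port update but left the binding unusable. The sketch below is a minimal, self-contained illustration of that check, not Nova's verbatim source: the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions about the Neutron port dict rather than values shown in this log, and the exception class is a stand-in for nova.exception.PortBindingFailed.

# Hedged sketch: how a port whose binding failed turns into the
# PortBindingFailed error recorded above (assumed port attributes).

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Neutron can return the updated port successfully while the backing
    # mechanism driver failed to bind it; the failure is only visible in
    # the port's binding attributes (attribute name assumed here).
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


port = {'id': '78639c1d-5491-49ff-8e1b-f3202998b190',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the message logged at 955.020854

In the log, this exception is re-raised through oslo_utils.excutils.save_and_reraise_exception() so the original traceback reaches _allocate_network_async, which is why the same PortBindingFailed appears both in the per-instance ERROR block and in the bare greenthread traceback.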
[ 956.711810] env[62109]: DEBUG oslo.service.loopingcall [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.712035] env[62109]: DEBUG nova.compute.manager [-] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 956.712124] env[62109]: DEBUG nova.network.neutron [-] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 956.732651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4f391172636432d8aae815207ca221a [ 956.777663] env[62109]: DEBUG nova.network.neutron [-] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 956.778194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7e7c1b84d8f14a2e926ba654b480c2be in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.785204] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e7c1b84d8f14a2e926ba654b480c2be [ 956.929067] env[62109]: DEBUG nova.scheduler.client.report [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.931403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1e929d87bc7d447e9f589267e005316c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 956.942014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e929d87bc7d447e9f589267e005316c [ 957.058870] env[62109]: ERROR nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. 
[ 957.058870] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.058870] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.058870] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.058870] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.058870] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.058870] env[62109]: ERROR nova.compute.manager raise self.value [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.058870] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 957.058870] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.058870] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 957.059700] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 957.059700] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 957.059700] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. 
[ 957.059700] env[62109]: ERROR nova.compute.manager [ 957.059700] env[62109]: Traceback (most recent call last): [ 957.059700] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 957.059700] env[62109]: listener.cb(fileno) [ 957.059700] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 957.059700] env[62109]: result = function(*args, **kwargs) [ 957.059700] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 957.059700] env[62109]: return func(*args, **kwargs) [ 957.059700] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 957.059700] env[62109]: raise e [ 957.059700] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.059700] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 957.059700] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.059700] env[62109]: created_port_ids = self._update_ports_for_instance( [ 957.059700] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.059700] env[62109]: with excutils.save_and_reraise_exception(): [ 957.059700] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.059700] env[62109]: self.force_reraise() [ 957.059700] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.059700] env[62109]: raise self.value [ 957.059700] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.059700] env[62109]: updated_port = self._update_port( [ 957.059700] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.059700] env[62109]: _ensure_no_port_binding_failure(port) [ 957.059700] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 957.059700] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 957.060669] env[62109]: nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. [ 957.060669] env[62109]: Removing descriptor: 19 [ 957.201330] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.227554] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.227893] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.228197] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.228384] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.228576] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.228794] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.229088] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.229295] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.229474] 
env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.229938] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.229938] env[62109]: DEBUG nova.virt.hardware [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.230647] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de237a22-a8ff-41bd-942b-baef7f0d774b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.238809] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67fdc46-2c27-4cc8-bbb4-d3466c0856a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.251959] env[62109]: ERROR nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. 
[ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Traceback (most recent call last): [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] yield resources [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.driver.spawn(context, instance, image_meta, [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] vm_ref = self.build_virtual_machine(instance, [ 957.251959] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] vif_infos = vmwarevif.get_vif_info(self._session, [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] for vif in network_info: [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return self._sync_wrapper(fn, *args, **kwargs) [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.wait() [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self[:] = self._gt.wait() [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return self._exit_event.wait() [ 957.252492] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 957.252492] env[62109]: ERROR 
nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] current.throw(*self._exc) [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] result = function(*args, **kwargs) [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return func(*args, **kwargs) [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise e [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] nwinfo = self.network_api.allocate_for_instance( [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] created_port_ids = self._update_ports_for_instance( [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] with excutils.save_and_reraise_exception(): [ 957.252846] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.force_reraise() [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise self.value [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] updated_port = self._update_port( [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] _ensure_no_port_binding_failure(port) [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise exception.PortBindingFailed(port_id=port['id']) [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. [ 957.253325] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] [ 957.253325] env[62109]: INFO nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Terminating instance [ 957.254554] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquiring lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.254710] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquired lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.254870] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.255289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg d278e43d629b4deb810db773a6895967 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.263416] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d278e43d629b4deb810db773a6895967 [ 957.279694] env[62109]: DEBUG nova.network.neutron [-] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.280089] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e5fe3660f15040bdbd1274c85bba1767 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.287234] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5fe3660f15040bdbd1274c85bba1767 [ 957.433874] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.434436] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 
f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 957.436042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 8ffd4426eb1c4125bfe4e986bfddda0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.437027] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.760s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.438815] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 6494e764e2064130a34454ca420d5025 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.468159] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ffd4426eb1c4125bfe4e986bfddda0e [ 957.470099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6494e764e2064130a34454ca420d5025 [ 957.770764] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.782056] env[62109]: INFO nova.compute.manager [-] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Took 1.07 seconds to deallocate network for instance. 
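The lock records around this point all come from oslo_concurrency.lockutils, as the {{...}} suffixes show: the Acquiring/Acquired/Releasing triples for "refresh_cache-<uuid>" are emitted by the lock() context manager (lockutils.py:310/313/331), while the "acquired by ... waited N s" / "released by ... held N s" pairs for "compute_resources" come from the inner wrapper of the synchronized decorator (lockutils.py:402/407/421). A minimal, process-local reproduction is sketched below, assuming oslo.concurrency is installed; only the lock names are taken from the log, and the function names and bodies are illustrative placeholders, not Nova's source.

# Hedged sketch: reproducing the two lockutils logging patterns seen above.

import logging
from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs with the "compute_resources" lock held; the decorator's wrapper
    # logs how long the caller waited and how long the lock was held.
    print('reverting resource claim for %s' % instance_uuid)


def refresh_network_cache(instance_uuid):
    # Context-manager form, matching the "refresh_cache-<uuid>" lock names
    # in the surrounding records.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('rebuilding instance_info_cache for %s' % instance_uuid)


abort_instance_claim('d622ca94-7f5c-47f4-8077-ff37f64eea02')
refresh_network_cache('d622ca94-7f5c-47f4-8077-ff37f64eea02')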
[ 957.784226] env[62109]: DEBUG nova.compute.claims [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 957.784408] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.847371] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.847817] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 9bba8ff098d84c9f8c088b6752e5e6e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.855976] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bba8ff098d84c9f8c088b6752e5e6e9 [ 957.942304] env[62109]: DEBUG nova.compute.utils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 957.942906] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg cd0d60020b8c4a19a029c86cef682020 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 957.946695] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 957.946861] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 957.952466] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd0d60020b8c4a19a029c86cef682020 [ 957.986648] env[62109]: DEBUG nova.policy [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 958.071442] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5658f2-e08c-40dc-a281-bc854fd2224c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.079174] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97f4ff8-6c27-4db7-aa04-2e1c5df3c62b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.110086] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446faea7-3af4-46bb-b2cd-725c82efcc6d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.116578] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4c195d-8fe3-4582-9f78-18ea242690a4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.128847] env[62109]: DEBUG nova.compute.provider_tree [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.129350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg cc8803e9dc5d4c87964b47c9fd08a080 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.136038] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc8803e9dc5d4c87964b47c9fd08a080 [ 958.247927] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Successfully created port: 886b872c-e114-45cf-8809-f9755f25bde6 {{(pid=62109) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 958.350436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Releasing lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.350861] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.351046] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 958.351349] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c53f6b8-33e7-430b-9fa9-748bde348106 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.360248] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3e1966-24e2-4962-8208-833829875341 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.380878] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49a0249a-f322-47f6-b723-2af2b701902c could not be found. [ 958.381101] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 958.381303] env[62109]: INFO nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 958.381573] env[62109]: DEBUG oslo.service.loopingcall [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.381875] env[62109]: DEBUG nova.compute.manager [-] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.382008] env[62109]: DEBUG nova.network.neutron [-] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.397174] env[62109]: DEBUG nova.network.neutron [-] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.397641] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3f86336af97b4d38a7039568ec5e9ee4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.404060] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f86336af97b4d38a7039568ec5e9ee4 [ 958.447284] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 958.449050] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b0cbc9021a6b45d6b074d9547fe766c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.480875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0cbc9021a6b45d6b074d9547fe766c4 [ 958.571200] env[62109]: DEBUG nova.compute.manager [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Received event network-changed-bc6eae64-58f5-45f8-84e4-a333eeaf85e1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 958.571384] env[62109]: DEBUG nova.compute.manager [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Refreshing instance network info cache due to event network-changed-bc6eae64-58f5-45f8-84e4-a333eeaf85e1. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 958.571593] env[62109]: DEBUG oslo_concurrency.lockutils [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] Acquiring lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.571725] env[62109]: DEBUG oslo_concurrency.lockutils [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] Acquired lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.571873] env[62109]: DEBUG nova.network.neutron [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Refreshing network info cache for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.572311] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] Expecting reply to msg 328a2ff1ca3a47fe8e1b3eeb62b578bc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.579284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 328a2ff1ca3a47fe8e1b3eeb62b578bc [ 958.632276] env[62109]: DEBUG nova.scheduler.client.report [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 958.634732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 58e2341478b14c19af5da1f4b831f36e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.649432] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58e2341478b14c19af5da1f4b831f36e [ 958.899597] env[62109]: DEBUG nova.network.neutron [-] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.900071] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 83f3d07cfcfa410a91f36d962f40fc4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 958.908577] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83f3d07cfcfa410a91f36d962f40fc4a [ 958.953308] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4cde5926682749a78e2e986ba2a68ab5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 
958.980745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cde5926682749a78e2e986ba2a68ab5 [ 959.072880] env[62109]: ERROR nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. [ 959.072880] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 959.072880] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 959.072880] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 959.072880] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 959.072880] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 959.072880] env[62109]: ERROR nova.compute.manager raise self.value [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 959.072880] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 959.072880] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 959.072880] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 959.073646] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 959.073646] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 959.073646] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. 
[ 959.073646] env[62109]: ERROR nova.compute.manager [ 959.073646] env[62109]: Traceback (most recent call last): [ 959.073646] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 959.073646] env[62109]: listener.cb(fileno) [ 959.073646] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 959.073646] env[62109]: result = function(*args, **kwargs) [ 959.073646] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 959.073646] env[62109]: return func(*args, **kwargs) [ 959.073646] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 959.073646] env[62109]: raise e [ 959.073646] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 959.073646] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 959.073646] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 959.073646] env[62109]: created_port_ids = self._update_ports_for_instance( [ 959.073646] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 959.073646] env[62109]: with excutils.save_and_reraise_exception(): [ 959.073646] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 959.073646] env[62109]: self.force_reraise() [ 959.073646] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 959.073646] env[62109]: raise self.value [ 959.073646] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 959.073646] env[62109]: updated_port = self._update_port( [ 959.073646] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 959.073646] env[62109]: _ensure_no_port_binding_failure(port) [ 959.073646] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 959.073646] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 959.074724] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. [ 959.074724] env[62109]: Removing descriptor: 19 [ 959.087503] env[62109]: DEBUG nova.network.neutron [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.136977] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.700s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.137823] env[62109]: ERROR nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Traceback (most recent call last): [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.driver.spawn(context, instance, image_meta, [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] vm_ref = self.build_virtual_machine(instance, [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] vif_infos = vmwarevif.get_vif_info(self._session, [ 959.137823] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] for vif in network_info: [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self._sync_wrapper(fn, *args, **kwargs) [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.wait() [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 959.138166] env[62109]: ERROR nova.compute.manager 
[instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self[:] = self._gt.wait() [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self._exit_event.wait() [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] result = hub.switch() [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 959.138166] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return self.greenlet.switch() [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] result = function(*args, **kwargs) [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] return func(*args, **kwargs) [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise e [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] nwinfo = self.network_api.allocate_for_instance( [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] created_port_ids = self._update_ports_for_instance( [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] with excutils.save_and_reraise_exception(): [ 959.138505] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] self.force_reraise() [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise self.value [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] updated_port = self._update_port( [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] _ensure_no_port_binding_failure(port) [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] raise exception.PortBindingFailed(port_id=port['id']) [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] nova.exception.PortBindingFailed: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. [ 959.138853] env[62109]: ERROR nova.compute.manager [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] [ 959.139164] env[62109]: DEBUG nova.compute.utils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 959.139795] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.139s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.141597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg bb597fadb2294ac88c76b600af8c9a1c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.143088] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Build of instance 8bd1a8aa-844b-47ca-9296-0c30af695984 was re-scheduled: Binding failed for port 803e3dd5-6dbb-437e-931c-d3616c54eafb, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 959.143499] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 959.143718] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquiring lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.143861] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Acquired lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.144047] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 959.144431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg a499640bac7342d9b90b0c304bbb07a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.150778] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a499640bac7342d9b90b0c304bbb07a9 [ 959.158451] env[62109]: DEBUG nova.network.neutron [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.158858] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] Expecting reply to msg 3ea7b857f6114431ba67fead4c5749ff in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.171841] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ea7b857f6114431ba67fead4c5749ff [ 959.175298] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb597fadb2294ac88c76b600af8c9a1c [ 959.402341] env[62109]: INFO nova.compute.manager [-] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Took 1.02 seconds to deallocate network for instance. 
[ 959.404669] env[62109]: DEBUG nova.compute.claims [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 959.404845] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.456477] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 959.480926] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.481177] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.481335] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.481519] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 959.481666] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.481803] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.482004] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.482158] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.482317] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.482521] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.482709] env[62109]: DEBUG nova.virt.hardware [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.483543] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed77f3f-e13e-46fe-b4d1-c434ce7146d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.491799] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef951fbb-d2ba-41ed-833d-2a28248b680e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.505266] env[62109]: ERROR nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. 
[ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Traceback (most recent call last): [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] yield resources [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.driver.spawn(context, instance, image_meta, [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] vm_ref = self.build_virtual_machine(instance, [ 959.505266] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] vif_infos = vmwarevif.get_vif_info(self._session, [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] for vif in network_info: [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return self._sync_wrapper(fn, *args, **kwargs) [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.wait() [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self[:] = self._gt.wait() [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return self._exit_event.wait() [ 959.505629] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 959.505629] env[62109]: ERROR 
nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] current.throw(*self._exc) [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] result = function(*args, **kwargs) [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return func(*args, **kwargs) [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise e [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] nwinfo = self.network_api.allocate_for_instance( [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] created_port_ids = self._update_ports_for_instance( [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] with excutils.save_and_reraise_exception(): [ 959.505984] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.force_reraise() [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise self.value [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] updated_port = self._update_port( [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] _ensure_no_port_binding_failure(port) [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise exception.PortBindingFailed(port_id=port['id']) [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. [ 959.506334] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] [ 959.506334] env[62109]: INFO nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Terminating instance [ 959.507508] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.507663] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.507824] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 959.508279] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 96b701f757e1458b8abcc7bfd6494940 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.514474] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96b701f757e1458b8abcc7bfd6494940 [ 959.660415] env[62109]: DEBUG oslo_concurrency.lockutils [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] Releasing lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.660803] env[62109]: DEBUG nova.compute.manager [req-33e900b4-3030-4ded-9a79-b617cd859097 req-1c38aaca-87a9-4ba7-8210-8c1dfc0f120b service nova] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Received event network-vif-deleted-bc6eae64-58f5-45f8-84e4-a333eeaf85e1 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 959.664382] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.757307] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.757831] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 88b55afe09a14cc19d2527bc9efe48a0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.766243] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b55afe09a14cc19d2527bc9efe48a0 [ 959.785749] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f71cecb-ad4e-40af-8d8e-ae2673ee5b16 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.793311] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d3c4ce-c597-4dd7-948d-f082db8945f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.822790] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d849e52-f773-4e0c-9ab5-3eafc5ba23e8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.829196] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0447b45d-1226-449f-9bc9-cab58453819b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.841484] env[62109]: DEBUG nova.compute.provider_tree [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.841958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 4058a91adb874b57ad204fe7915144c9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 959.848833] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4058a91adb874b57ad204fe7915144c9 [ 960.025517] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.101949] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.102703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5e6d436d4b3b4b19be3b548189b41e7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.110352] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e6d436d4b3b4b19be3b548189b41e7d [ 960.259931] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Releasing lock "refresh_cache-8bd1a8aa-844b-47ca-9296-0c30af695984" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.260208] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 960.260521] env[62109]: DEBUG nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.260756] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.274996] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.275524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg faedf25ac27e41ba9b0433057711c338 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.283555] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faedf25ac27e41ba9b0433057711c338 [ 960.344483] env[62109]: DEBUG nova.scheduler.client.report [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.346978] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ed7cf188fac44ea89a6439d114f7f28e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.358034] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed7cf188fac44ea89a6439d114f7f28e [ 960.598608] env[62109]: DEBUG nova.compute.manager [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Received event network-changed-886b872c-e114-45cf-8809-f9755f25bde6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 960.598888] env[62109]: DEBUG nova.compute.manager [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Refreshing instance network info cache due to event network-changed-886b872c-e114-45cf-8809-f9755f25bde6. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 960.599111] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] Acquiring lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.605276] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.605799] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 960.606113] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 960.606457] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] Acquired lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.606650] env[62109]: DEBUG nova.network.neutron [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Refreshing network info cache for port 886b872c-e114-45cf-8809-f9755f25bde6 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 960.607126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] Expecting reply to msg a4015af246d54d1787237cd637633c69 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.610658] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81e498b3-c711-4c61-9925-b3054828663a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.614287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4015af246d54d1787237cd637633c69 [ 960.619106] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84c0a01-a22d-4578-804a-f179baa2348a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.640843] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54 could not be found. [ 960.640843] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 960.640843] env[62109]: INFO nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Took 0.03 seconds to destroy the instance on the hypervisor. [ 960.641000] env[62109]: DEBUG oslo.service.loopingcall [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.641194] env[62109]: DEBUG nova.compute.manager [-] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.641288] env[62109]: DEBUG nova.network.neutron [-] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.656241] env[62109]: DEBUG nova.network.neutron [-] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.656830] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 43815da348344eb6a3b3308a37ca9390 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.665320] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43815da348344eb6a3b3308a37ca9390 [ 960.778177] env[62109]: DEBUG nova.network.neutron [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.778682] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 3edd2c9697064e559f7f4b88536803eb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.786652] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3edd2c9697064e559f7f4b88536803eb [ 960.849638] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.710s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.850273] env[62109]: ERROR nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. 
[ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Traceback (most recent call last): [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.driver.spawn(context, instance, image_meta, [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] vm_ref = self.build_virtual_machine(instance, [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 960.850273] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] for vif in network_info: [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return self._sync_wrapper(fn, *args, **kwargs) [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.wait() [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self[:] = self._gt.wait() [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return self._exit_event.wait() [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] current.throw(*self._exc) [ 960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
960.850796] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] result = function(*args, **kwargs) [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] return func(*args, **kwargs) [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise e [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] nwinfo = self.network_api.allocate_for_instance( [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] created_port_ids = self._update_ports_for_instance( [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] with excutils.save_and_reraise_exception(): [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] self.force_reraise() [ 960.851204] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise self.value [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] updated_port = self._update_port( [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] _ensure_no_port_binding_failure(port) [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] raise exception.PortBindingFailed(port_id=port['id']) [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] nova.exception.PortBindingFailed: Binding failed for 
port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. [ 960.851610] env[62109]: ERROR nova.compute.manager [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] [ 960.851610] env[62109]: DEBUG nova.compute.utils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 960.852527] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.551s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.854283] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 79a4bb7ba039497d8332a9fba6a4a729 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.855708] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Build of instance 27fed863-1e27-4258-8b43-b8cd23e3c1c0 was re-scheduled: Binding failed for port 09fb75f7-ed62-435c-bde1-9bcbab653d49, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 960.856122] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 960.856343] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.856487] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.856645] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 960.857015] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 580dd3a6e8eb464b897b93d685e4e326 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 960.862652] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 580dd3a6e8eb464b897b93d685e4e326 [ 960.888405] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79a4bb7ba039497d8332a9fba6a4a729 [ 961.124774] env[62109]: DEBUG nova.network.neutron [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.159082] env[62109]: DEBUG nova.network.neutron [-] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.159606] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 282be655497d4f9db2a652739c85cc23 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.167850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 282be655497d4f9db2a652739c85cc23 [ 961.219484] env[62109]: DEBUG nova.network.neutron [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.220069] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] Expecting reply to msg e279d2a53fb247dababfa9c318b9ce56 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.228394] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e279d2a53fb247dababfa9c318b9ce56 [ 961.280969] env[62109]: INFO nova.compute.manager [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] [instance: 8bd1a8aa-844b-47ca-9296-0c30af695984] Took 1.02 seconds to deallocate network for instance. [ 961.282819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 240f63f5837e486ba93c2a5584784e09 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.314078] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 240f63f5837e486ba93c2a5584784e09 [ 961.374803] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.445046] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.445904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg cccbf56c3cc5411090230ec1af00c020 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.453340] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cccbf56c3cc5411090230ec1af00c020 [ 961.489027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e882fee8-e177-4827-bdd9-9b1c5cd441f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.496558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa6942f-f891-47a2-8035-889c41c0d849 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.525116] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62aac28-ccec-4cf5-abd0-a35a260e9ad7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.531559] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d1c8a8-b4df-4b95-9267-368f16a6d935 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.544994] env[62109]: DEBUG nova.compute.provider_tree [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.545414] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg b41e9d3772984591a5ef7ea9e8dc374b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.552383] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b41e9d3772984591a5ef7ea9e8dc374b [ 961.662018] env[62109]: INFO nova.compute.manager [-] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Took 1.02 seconds to deallocate network for instance. 
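[editor's note] The repeated "Acquiring lock ...", "Lock ... acquired by ... :: waited N.NNNs" and "Lock ... \"released\" by ... :: held N.NNNs" lines in this log come from oslo.concurrency's synchronized wrapper (the lockutils.py:402/407/421 paths quoted above). A minimal sketch of the pattern that produces them is below; it is an illustration only, not Nova source, and the function name is a hypothetical stand-in for the resource-tracker methods named in the log.

    # Illustration only: oslo.concurrency's synchronized decorator serializes the
    # wrapped call on a named lock and logs the waited/held timings seen above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # critical section: only one caller touches the shared resource-tracker
        # state at a time; the wrapper logs how long it waited for and held the lock
        pass

    abort_instance_claim()
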
[ 961.664650] env[62109]: DEBUG nova.compute.claims [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 961.664915] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.722163] env[62109]: DEBUG oslo_concurrency.lockutils [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] Releasing lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.722477] env[62109]: DEBUG nova.compute.manager [req-ea46b60c-2e56-4c18-b8cb-2b176a96810c req-cb6c2dc8-7158-4f45-b2ba-affd555937e2 service nova] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Received event network-vif-deleted-886b872c-e114-45cf-8809-f9755f25bde6 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 961.787530] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 3eb3cac75fa24f149d79c43fa282be88 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.816696] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3eb3cac75fa24f149d79c43fa282be88 [ 961.948222] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-27fed863-1e27-4258-8b43-b8cd23e3c1c0" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.948545] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 961.948741] env[62109]: DEBUG nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.948909] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 961.963197] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.963719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a2afed3eb66240408ca11c3ab7ce2ad4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 961.970195] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2afed3eb66240408ca11c3ab7ce2ad4 [ 962.047633] env[62109]: DEBUG nova.scheduler.client.report [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.050106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg bdafc65f4cec49c486a0fe7e87fe15fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.060777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdafc65f4cec49c486a0fe7e87fe15fd [ 962.309782] env[62109]: INFO nova.scheduler.client.report [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Deleted allocations for instance 8bd1a8aa-844b-47ca-9296-0c30af695984 [ 962.316058] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Expecting reply to msg 7c86803cd097413096d8f90745275592 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.327154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c86803cd097413096d8f90745275592 
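[editor's note] The scheduler report-client entry just above lists the provider's inventory (VCPU, MEMORY_MB, DISK_GB with total, reserved and allocation_ratio). Placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio; the sketch below just reproduces that arithmetic with the values copied from the log line, as a quick way to read the record.

    # Minimal sketch: effective capacity implied by the inventory logged above,
    # using capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:.0f} schedulable")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
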
[ 962.465578] env[62109]: DEBUG nova.network.neutron [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.466139] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a549ef9c10434561a0dda15787ea6486 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.474492] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a549ef9c10434561a0dda15787ea6486 [ 962.552883] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.700s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.553517] env[62109]: ERROR nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Traceback (most recent call last): [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.driver.spawn(context, instance, image_meta, [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] vm_ref = self.build_virtual_machine(instance, [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] vif_infos = vmwarevif.get_vif_info(self._session, [ 962.553517] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] for vif in network_info: [ 962.553866] env[62109]: ERROR 
nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return self._sync_wrapper(fn, *args, **kwargs) [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.wait() [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self[:] = self._gt.wait() [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return self._exit_event.wait() [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] current.throw(*self._exc) [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 962.553866] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] result = function(*args, **kwargs) [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] return func(*args, **kwargs) [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise e [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] nwinfo = self.network_api.allocate_for_instance( [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] created_port_ids = self._update_ports_for_instance( [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] with excutils.save_and_reraise_exception(): [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 
228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] self.force_reraise() [ 962.554211] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise self.value [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] updated_port = self._update_port( [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] _ensure_no_port_binding_failure(port) [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] raise exception.PortBindingFailed(port_id=port['id']) [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] nova.exception.PortBindingFailed: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. [ 962.554614] env[62109]: ERROR nova.compute.manager [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] [ 962.554614] env[62109]: DEBUG nova.compute.utils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. 
{{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 962.555382] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.461s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.555554] env[62109]: DEBUG nova.objects.instance [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] [instance: 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 962.557259] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg c9ebd7b3841e471ea90053ad11df64b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.558548] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Build of instance 228d2a6d-6c16-472c-9326-2e4576d9648c was re-scheduled: Binding failed for port ff125b21-2e96-4688-8b92-5ff1f1891e83, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 962.559173] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 962.559395] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquiring lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.559534] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Acquired lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.559778] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 962.560191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 
71d3b6c2cec740c5b7d202ed1e6d9b2d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.566282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71d3b6c2cec740c5b7d202ed1e6d9b2d [ 962.587735] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9ebd7b3841e471ea90053ad11df64b5 [ 962.818064] env[62109]: DEBUG oslo_concurrency.lockutils [None req-8bd3e89f-4adc-4738-9e63-c6a5798c33ed tempest-ServerTagsTestJSON-1874445576 tempest-ServerTagsTestJSON-1874445576-project-member] Lock "8bd1a8aa-844b-47ca-9296-0c30af695984" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.469s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.968541] env[62109]: INFO nova.compute.manager [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 27fed863-1e27-4258-8b43-b8cd23e3c1c0] Took 1.02 seconds to deallocate network for instance. [ 962.970289] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2045efcec0eb4dc09228e416821d4bb5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 962.999898] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2045efcec0eb4dc09228e416821d4bb5 [ 963.063565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 89e4949bd4994c93a89ae37d4526db83 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.072944] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89e4949bd4994c93a89ae37d4526db83 [ 963.078654] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.147521] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.148117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 9b21723ed5434e3aadac5d487298a537 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.155772] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b21723ed5434e3aadac5d487298a537 [ 963.475346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 3608722ec4cc4e26b50466a594ad27b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.513766] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3608722ec4cc4e26b50466a594ad27b0 [ 963.568336] env[62109]: DEBUG oslo_concurrency.lockutils [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.568805] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-04340e57-c9b7-47e9-83d9-a9bcc609b11d tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg ec5c7841cb8849e89334e5753ec2023b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.569747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.924s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.572749] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 06927b28d78643f0a11f111585e86694 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.584301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec5c7841cb8849e89334e5753ec2023b [ 963.615520] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06927b28d78643f0a11f111585e86694 [ 963.650589] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Releasing lock "refresh_cache-228d2a6d-6c16-472c-9326-2e4576d9648c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.650797] 
env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 963.650981] env[62109]: DEBUG nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.651143] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 963.668122] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.668765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 7b9f0bd55ba3444fa59b32369ca31219 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 963.675506] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b9f0bd55ba3444fa59b32369ca31219 [ 964.002396] env[62109]: INFO nova.scheduler.client.report [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance 27fed863-1e27-4258-8b43-b8cd23e3c1c0 [ 964.009636] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 72162104dcaf4bb9bc1b3f0250a90518 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 964.019326] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72162104dcaf4bb9bc1b3f0250a90518 [ 964.172031] env[62109]: DEBUG nova.network.neutron [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.172420] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 257a8b500a484489af17c8c07fb3ac82 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 964.180943] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
257a8b500a484489af17c8c07fb3ac82 [ 964.186355] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6081e850-4b1f-433d-bbed-aed9194cb4f5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.195754] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f56890c-76af-4a92-b034-d7bb25429e70 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.230880] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c7bf29-a0c4-4ee0-93fd-35f87c869b84 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.240853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5671b0ba-78d0-41a8-9d35-45ced97397c6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.255489] env[62109]: DEBUG nova.compute.provider_tree [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.256214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 361440e959d941e0b1110a5ac0294645 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 964.264765] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 361440e959d941e0b1110a5ac0294645 [ 964.511616] env[62109]: DEBUG oslo_concurrency.lockutils [None req-0a5a4312-162f-4102-bb0b-f6b8f1f79df9 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "27fed863-1e27-4258-8b43-b8cd23e3c1c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.917s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.675244] env[62109]: INFO nova.compute.manager [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] [instance: 228d2a6d-6c16-472c-9326-2e4576d9648c] Took 1.02 seconds to deallocate network for instance. 
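[editor's note] The PortBindingFailed tracebacks in this log all pass through eventlet's greenthread.wait() / event.wait() / current.throw(): network allocation runs in a separate greenthread (_allocate_network_async), so the exception is stored there and only re-raised in the spawning code path once the network_info wrapper is iterated and wait() is called. The sketch below shows that deferred re-raise in isolation; it assumes eventlet is installed, and the function and exception names are hypothetical stand-ins, not Nova code.

    # Illustration of the deferred re-raise visible in the tracebacks above.
    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network():
        # stands in for the async network allocation failing on a port update
        raise PortBindingFailed("Binding failed for port <id>")

    gt = eventlet.spawn(allocate_network)      # exception happens in the greenthread
    try:
        gt.wait()                              # ...and is re-raised here, later
    except PortBindingFailed as exc:
        print(f"surfaces only at wait() time: {exc}")
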
[ 964.677190] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg 0504c047fc02448fa6425f02d1af017b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 964.741016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0504c047fc02448fa6425f02d1af017b [ 964.758726] env[62109]: DEBUG nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.761484] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 0e1aea029abb46a29cbd30c24bf637bc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 964.772129] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e1aea029abb46a29cbd30c24bf637bc [ 965.182309] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg ecbbb6cb4c29448a8a3036ab6f79849e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.213401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecbbb6cb4c29448a8a3036ab6f79849e [ 965.264400] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.695s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.264998] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. 
[ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Traceback (most recent call last): [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.driver.spawn(context, instance, image_meta, [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] vm_ref = self.build_virtual_machine(instance, [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] vif_infos = vmwarevif.get_vif_info(self._session, [ 965.264998] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] for vif in network_info: [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return self._sync_wrapper(fn, *args, **kwargs) [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.wait() [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self[:] = self._gt.wait() [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return self._exit_event.wait() [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] current.throw(*self._exc) [ 965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
965.265361] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] result = function(*args, **kwargs) [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] return func(*args, **kwargs) [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise e [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] nwinfo = self.network_api.allocate_for_instance( [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] created_port_ids = self._update_ports_for_instance( [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] with excutils.save_and_reraise_exception(): [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] self.force_reraise() [ 965.265797] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise self.value [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] updated_port = self._update_port( [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] _ensure_no_port_binding_failure(port) [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] raise exception.PortBindingFailed(port_id=port['id']) [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] nova.exception.PortBindingFailed: Binding failed for 
port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. [ 965.266220] env[62109]: ERROR nova.compute.manager [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] [ 965.266220] env[62109]: DEBUG nova.compute.utils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 965.267084] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.368s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.269015] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9e72297acded41e7ba8b0425c89aef90 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.269811] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Build of instance a879b81d-fb5a-483b-9c2a-4a5c416c1caa was re-scheduled: Binding failed for port c678b72f-ea04-4809-afb1-fe2f0b013d2b, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 965.270231] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 965.270456] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.270597] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.270753] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.271111] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg b8f2fda9337843418fa31f7e97a929fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.280117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8f2fda9337843418fa31f7e97a929fd [ 965.315187] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e72297acded41e7ba8b0425c89aef90 [ 965.705956] env[62109]: INFO nova.scheduler.client.report [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Deleted allocations for instance 228d2a6d-6c16-472c-9326-2e4576d9648c [ 965.712039] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Expecting reply to msg d926c90288224431bd6354c270d53ba1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.733389] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d926c90288224431bd6354c270d53ba1 [ 965.790947] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.878907] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.879469] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 3314036c819248fa91f7785691d9aee3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.886962] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3314036c819248fa91f7785691d9aee3 [ 965.890808] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05aa6ae0-7db3-4c30-b29d-006af7b2257b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.900200] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aca039e-946f-4805-8705-0fcbfa5beefb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.929601] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b65104d-b2ae-4335-b345-223737e960bb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.937622] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a15592a-0757-4f14-b03a-2986fec6f3b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.952418] env[62109]: DEBUG nova.compute.provider_tree [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.952989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 38c0e19f812c482da42a76ce0a86d13a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 965.960697] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38c0e19f812c482da42a76ce0a86d13a [ 966.003160] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "138f2594-adbe-4ce2-a395-40fae312981b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.003379] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock 
"138f2594-adbe-4ce2-a395-40fae312981b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.003824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 87dcee1f686d4fa9b324416c78ed51d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.012851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87dcee1f686d4fa9b324416c78ed51d0 [ 966.220231] env[62109]: DEBUG oslo_concurrency.lockutils [None req-44cc891d-a696-452c-824e-969d4e3bfe35 tempest-InstanceActionsV221TestJSON-1207156857 tempest-InstanceActionsV221TestJSON-1207156857-project-member] Lock "228d2a6d-6c16-472c-9326-2e4576d9648c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.096s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.363020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.363245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.363695] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 1b573646a3d64bebacfa3e6f0ed482e8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.370825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b573646a3d64bebacfa3e6f0ed482e8 [ 966.383391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-a879b81d-fb5a-483b-9c2a-4a5c416c1caa" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.383602] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 966.383781] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.383943] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 966.399238] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.399777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 57ab446f7e9b4932931fd2af70fa1423 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.406904] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57ab446f7e9b4932931fd2af70fa1423 [ 966.456289] env[62109]: DEBUG nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.460507] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg adc550161ff24852bc1aa00e5c05b7c8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.474853] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adc550161ff24852bc1aa00e5c05b7c8 [ 966.505930] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 966.507741] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5aa6e880871b4244bfb3d293dd4e6d3b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.537919] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aa6e880871b4244bfb3d293dd4e6d3b [ 966.865647] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 966.867601] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 7f984f06f96f4981932ce7d7017cc92e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.897294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f984f06f96f4981932ce7d7017cc92e [ 966.901723] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.902257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d19e43aadb234c869af1990e29f10cb4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.910213] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d19e43aadb234c869af1990e29f10cb4 [ 966.966001] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.699s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.966700] env[62109]: ERROR nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
[ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Traceback (most recent call last): [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.driver.spawn(context, instance, image_meta, [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] vm_ref = self.build_virtual_machine(instance, [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] vif_infos = vmwarevif.get_vif_info(self._session, [ 966.966700] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] for vif in network_info: [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return self._sync_wrapper(fn, *args, **kwargs) [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.wait() [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self[:] = self._gt.wait() [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return self._exit_event.wait() [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] current.throw(*self._exc) [ 966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
966.967144] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] result = function(*args, **kwargs) [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] return func(*args, **kwargs) [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise e [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] nwinfo = self.network_api.allocate_for_instance( [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] created_port_ids = self._update_ports_for_instance( [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] with excutils.save_and_reraise_exception(): [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] self.force_reraise() [ 966.967546] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise self.value [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] updated_port = self._update_port( [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] _ensure_no_port_binding_failure(port) [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] raise exception.PortBindingFailed(port_id=port['id']) [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] nova.exception.PortBindingFailed: Binding failed for 
port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. [ 966.967956] env[62109]: ERROR nova.compute.manager [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] [ 966.967956] env[62109]: DEBUG nova.compute.utils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 966.968801] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.984s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.969035] env[62109]: DEBUG nova.objects.instance [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lazy-loading 'resources' on Instance uuid 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.969410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg d7322b3ae23a4c38ae36997a42cd30f1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.970741] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Build of instance 9ab3a71f-7e26-4d29-b006-6dbebcee16e1 was re-scheduled: Binding failed for port 977b3bb3-ccfc-4735-af64-154240ffc670, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 966.971203] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 966.971459] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquiring lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.971642] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Acquired lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.971836] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.972282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 221c8a004ffa4dafac7b5085b6ab7f02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 966.975876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7322b3ae23a4c38ae36997a42cd30f1 [ 966.979006] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 221c8a004ffa4dafac7b5085b6ab7f02 [ 967.029511] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.387412] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.404662] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: a879b81d-fb5a-483b-9c2a-4a5c416c1caa] Took 1.02 seconds to deallocate network for instance. 
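Editor's note: the two PortBindingFailed tracebacks in this section end in nova/network/neutron.py, where _ensure_no_port_binding_failure(port) raises the exception after Neutron returns the updated port. The following is a minimal, self-contained sketch of that kind of check, not the actual Nova source: the 'binding_failed' vif_type value and the local stand-in exception class are assumptions for illustration only.

    # Sketch of the binding-failure check the tracebacks above point at.
    # In Nova the real check lives in nova/network/neutron.py and raises
    # nova.exception.PortBindingFailed; a local stand-in class is used here.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    # Assumption: Neutron signals a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        # Inspect the port dict returned by Neutron; raise if binding failed.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example shaped like the failure logged above for port 977b3bb3-...
    port = {'id': '977b3bb3-ccfc-4735-af64-154240ffc670',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message format as the ERROR lines above

When the exception propagates, the compute manager aborts the resource claim, marks the build for re-scheduling, and deallocates the instance's network, which is exactly the sequence of lock/claim/deallocate entries surrounding each traceback here.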
[ 967.406497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 6db6a9cd355044319f66f665c809ecc6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 967.448371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6db6a9cd355044319f66f665c809ecc6 [ 967.498369] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 967.617710] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.618281] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 9f974fd94e8d40b6bf048de67a58ac6a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 967.626968] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f974fd94e8d40b6bf048de67a58ac6a [ 967.633658] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b0712b-d1c1-4004-8611-42faa5527e0f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.642445] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab91ea20-2811-40cc-811e-4013b75b56a8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.673183] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f33da05-3097-42cd-b433-dea6947ef8ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.680732] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69943caa-7bc5-46db-add0-da7d94752cf4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.695096] env[62109]: DEBUG nova.compute.provider_tree [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.695719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 9953dd9ea75740feba1e66487abcd5d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 967.702992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
9953dd9ea75740feba1e66487abcd5d9 [ 967.920025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 2da5e1afb28e48878c07c7c39ae46d06 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 967.944364] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2da5e1afb28e48878c07c7c39ae46d06 [ 968.123143] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Releasing lock "refresh_cache-9ab3a71f-7e26-4d29-b006-6dbebcee16e1" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.123143] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 968.123143] env[62109]: DEBUG nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 968.123143] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 968.138999] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 968.139591] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 2ea5697977524e75a935aaec984dd647 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.147357] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ea5697977524e75a935aaec984dd647 [ 968.199092] env[62109]: DEBUG nova.scheduler.client.report [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.201535] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 8ffcd2959920433b927ce39382de6578 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.215960] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ffcd2959920433b927ce39382de6578 [ 968.437960] env[62109]: INFO nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Deleted allocations for instance a879b81d-fb5a-483b-9c2a-4a5c416c1caa [ 968.444687] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg bf77d38637cf4c1bb1e77868d18baa0f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.457663] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf77d38637cf4c1bb1e77868d18baa0f [ 968.642143] env[62109]: DEBUG nova.network.neutron [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.642733] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 0738d6231a1140d190c03f1bd3288b46 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.654141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0738d6231a1140d190c03f1bd3288b46 [ 968.704223] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.712797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.853s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.714856] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 2a1d792de37c44cdb28699d4a99a40cc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.727430] env[62109]: INFO nova.scheduler.client.report [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Deleted allocations for instance 0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3 [ 968.730745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 37e4317972c745cbbdef8dd6c8cd5653 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 968.755197] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a1d792de37c44cdb28699d4a99a40cc [ 968.776714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37e4317972c745cbbdef8dd6c8cd5653 [ 968.947092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "a879b81d-fb5a-483b-9c2a-4a5c416c1caa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.330s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.152717] env[62109]: INFO nova.compute.manager [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] [instance: 9ab3a71f-7e26-4d29-b006-6dbebcee16e1] Took 1.03 seconds to deallocate network for instance. 
[ 969.154448] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg d6c835edca324dcd81bd4cbef951a213 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 969.187527] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6c835edca324dcd81bd4cbef951a213 [ 969.236165] env[62109]: DEBUG oslo_concurrency.lockutils [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Lock "0be23f0b-0c43-4eb1-8572-b8ef84f6c5b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.925s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.236734] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-f71c6376-f142-4a4c-8a16-c83f4d0f138b tempest-ServerShowV254Test-2031808440 tempest-ServerShowV254Test-2031808440-project-member] Expecting reply to msg 7607ba6128784f6ca0651ad0cbde5567 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 969.254312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7607ba6128784f6ca0651ad0cbde5567 [ 969.363066] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67998f82-8f29-4708-84a6-17e86387c069 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.370987] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b59bf58-8253-4140-ad4c-26c133a1959f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.402817] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1c6ecf-ed8b-4635-92cc-d21d750070b3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.413154] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8076e0-1d10-44b5-97cc-3f6645b2bbc1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.432266] env[62109]: DEBUG nova.compute.provider_tree [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.432266] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 94fb884cf11043b7bba07cd09ec219a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 969.440681] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94fb884cf11043b7bba07cd09ec219a2 [ 969.659199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg 22912ce729c0477699d47f6db256d6ed in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 969.697599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22912ce729c0477699d47f6db256d6ed [ 969.938331] env[62109]: DEBUG nova.scheduler.client.report [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 969.938331] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 6fdcaab601fb46679d210e36025ee073 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 969.950495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fdcaab601fb46679d210e36025ee073 [ 970.191350] env[62109]: INFO nova.scheduler.client.report [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Deleted allocations for instance 9ab3a71f-7e26-4d29-b006-6dbebcee16e1 [ 970.200388] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Expecting reply to msg e84cf5e36c6f4bda8613d4b12b30f750 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 970.216403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e84cf5e36c6f4bda8613d4b12b30f750 [ 970.445831] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.739s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.446536] env[62109]: ERROR nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. 
[ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Traceback (most recent call last): [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.driver.spawn(context, instance, image_meta, [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] vm_ref = self.build_virtual_machine(instance, [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 970.446536] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] for vif in network_info: [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self._sync_wrapper(fn, *args, **kwargs) [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.wait() [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self[:] = self._gt.wait() [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self._exit_event.wait() [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] result = hub.switch() [ 970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
970.446899] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return self.greenlet.switch() [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] result = function(*args, **kwargs) [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] return func(*args, **kwargs) [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise e [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] nwinfo = self.network_api.allocate_for_instance( [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] created_port_ids = self._update_ports_for_instance( [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] with excutils.save_and_reraise_exception(): [ 970.447244] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] self.force_reraise() [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise self.value [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] updated_port = self._update_port( [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] _ensure_no_port_binding_failure(port) [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] raise exception.PortBindingFailed(port_id=port['id']) [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] nova.exception.PortBindingFailed: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. [ 970.447589] env[62109]: ERROR nova.compute.manager [instance: b04cc451-a497-474f-90dd-282a469ff3c2] [ 970.447934] env[62109]: DEBUG nova.compute.utils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 970.448530] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.037s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.450022] env[62109]: INFO nova.compute.claims [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.451754] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 590e8b1b515742dc9987ac56c3379b09 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 970.454326] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Build of instance b04cc451-a497-474f-90dd-282a469ff3c2 was re-scheduled: Binding failed for port 73a1a0e1-a2b2-4bfd-af75-9650be5f9837, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 970.454822] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 970.455685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.455685] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.455685] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.455837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg b27b6a2d2474448490121f7855bac6e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 970.462232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b27b6a2d2474448490121f7855bac6e6 [ 970.496232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 590e8b1b515742dc9987ac56c3379b09 [ 970.702900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-34b71be8-5b92-4991-982d-bb3cebe6c941 tempest-MultipleCreateTestJSON-1958664483 tempest-MultipleCreateTestJSON-1958664483-project-member] Lock "9ab3a71f-7e26-4d29-b006-6dbebcee16e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.055s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.956725] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 1953a3e3331b4c04bbc698f6b48d4fd6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 970.966675] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1953a3e3331b4c04bbc698f6b48d4fd6 [ 970.972248] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.056147] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.056471] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 0aab84abf1d14d43870ec5c7ad1433cd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 971.065614] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aab84abf1d14d43870ec5c7ad1433cd [ 971.449123] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquiring lock "121dbfda-87d9-4733-a7d2-3ffa6f54df36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.449359] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "121dbfda-87d9-4733-a7d2-3ffa6f54df36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.449809] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 18bd78ffa192483da103a190a65b37ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 971.460141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18bd78ffa192483da103a190a65b37ca [ 971.560957] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-b04cc451-a497-474f-90dd-282a469ff3c2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.561204] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 971.561369] env[62109]: DEBUG nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 971.561533] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 971.577222] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.577764] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg fcfce5309e284a8398ef1d638db69ed0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 971.583777] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcfce5309e284a8398ef1d638db69ed0 [ 971.590114] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ed9e22-67dc-4b1b-a909-318e2fa9b53b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.599286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e1c306-79ea-4154-98b0-3764edfa4e92 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.630118] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b2d92e-75c4-4165-89c4-fbb8a2bb70a0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.637558] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0235029-ef5d-4e2b-9f71-335964165766 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.651536] env[62109]: DEBUG nova.compute.provider_tree [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.652074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 155cc3d082e24370b5c20025d60e1382 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 971.665543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
155cc3d082e24370b5c20025d60e1382 [ 971.951369] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.953534] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 5d1916b745a44865b2859b863fe65ca3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 971.984117] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1916b745a44865b2859b863fe65ca3 [ 972.079688] env[62109]: DEBUG nova.network.neutron [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: b04cc451-a497-474f-90dd-282a469ff3c2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.080262] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 9540985b88794b53affafefc19cc5177 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 972.088202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9540985b88794b53affafefc19cc5177 [ 972.154952] env[62109]: DEBUG nova.scheduler.client.report [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.157612] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 0db6de61c7b0418e856dcda4011b478d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 972.184356] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0db6de61c7b0418e856dcda4011b478d [ 972.473159] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.582629] env[62109]: INFO nova.compute.manager [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 
b04cc451-a497-474f-90dd-282a469ff3c2] Took 1.02 seconds to deallocate network for instance. [ 972.584812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 0ae48b609c1d40d289119b552a977e8e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 972.627916] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ae48b609c1d40d289119b552a977e8e [ 972.660728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.661136] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 972.662954] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg feaf682b82a84e98b5c1735d67c076b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 972.668098] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.880s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.668098] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 99ff3d9170624702a41be5a3c51826ad in queue reply_7522b64acfeb4981b1f36928b040d568 [ 972.697652] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg feaf682b82a84e98b5c1735d67c076b8 [ 972.716216] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ff3d9170624702a41be5a3c51826ad [ 973.089589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 23958e0c5e0347a9a601629648495447 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.126389] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23958e0c5e0347a9a601629648495447 [ 973.168970] env[62109]: DEBUG nova.compute.utils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 973.169611] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None 
req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 60565370e8d7416cae371aa7003ef665 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.170572] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 973.171077] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 973.181987] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60565370e8d7416cae371aa7003ef665 [ 973.220570] env[62109]: DEBUG nova.policy [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68a934a299dc48b0a7fc35f93d79db99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0685baf7056496280733eae476a8935', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 973.280328] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cba787c-c8c0-4a47-8796-fddee1e43489 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.287993] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b465cc-e084-4135-83c2-b0b4968a0698 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.323845] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c58945-db17-4c6d-a939-1fbd319ff865 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.332490] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2235ff-c739-41f8-b4d9-1d8e7b515ec1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.348979] env[62109]: DEBUG nova.compute.provider_tree [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.349465] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 
tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg eb30630f42b94382b35c3de2b8390618 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.358258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb30630f42b94382b35c3de2b8390618 [ 973.571473] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Successfully created port: 3e6eec16-c463-49ba-b551-ce1647747aad {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.610771] env[62109]: INFO nova.scheduler.client.report [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Deleted allocations for instance b04cc451-a497-474f-90dd-282a469ff3c2 [ 973.616750] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 480726d9f8d04d76af7a0cd67978af17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.635390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 480726d9f8d04d76af7a0cd67978af17 [ 973.676955] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 973.678783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg afba426c1f424086b1decb6d804fca3a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.709403] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afba426c1f424086b1decb6d804fca3a [ 973.852404] env[62109]: DEBUG nova.scheduler.client.report [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 973.854729] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 1ad2782178c847eeb4486a97eb2bd9c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 973.865730] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ad2782178c847eeb4486a97eb2bd9c4 [ 974.123696] env[62109]: DEBUG oslo_concurrency.lockutils [None 
req-ceb8b7a2-1f2f-4640-aa90-301c645b6da0 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "b04cc451-a497-474f-90dd-282a469ff3c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.434s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.182055] env[62109]: INFO nova.virt.block_device [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Booting with volume 63e98c38-5d45-4f38-ba12-8536414be71d at /dev/sda [ 974.217670] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-befe96b5-74b2-4052-9829-1e9d4d204837 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.226592] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679c7e73-2c29-4349-82fd-6b9db4033e90 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.248807] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cd44c63-27e1-4c51-b63a-f19c1e6a7a87 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.257577] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c86da03-2c86-4fe9-8a56-184721f620d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.268356] env[62109]: DEBUG nova.compute.manager [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Received event network-changed-3e6eec16-c463-49ba-b551-ce1647747aad {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 974.268545] env[62109]: DEBUG nova.compute.manager [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Refreshing instance network info cache due to event network-changed-3e6eec16-c463-49ba-b551-ce1647747aad. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 974.268773] env[62109]: DEBUG oslo_concurrency.lockutils [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] Acquiring lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.268943] env[62109]: DEBUG oslo_concurrency.lockutils [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] Acquired lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.269020] env[62109]: DEBUG nova.network.neutron [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Refreshing network info cache for port 3e6eec16-c463-49ba-b551-ce1647747aad {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 974.269435] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] Expecting reply to msg 237d089edf2c4f239ef56c7a93ac8b50 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.276371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 237d089edf2c4f239ef56c7a93ac8b50 [ 974.282380] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b89330-557d-4a3c-a2e5-7308c574e44d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.288430] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbe1221-19b1-4626-897b-4e5205b2a1ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.300551] env[62109]: DEBUG nova.virt.block_device [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating existing volume attachment record: 535aa4c1-4912-40c0-9e8a-36fdb52c5f68 {{(pid=62109) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 974.356763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.692s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.357397] env[62109]: ERROR nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. 
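The "Booting with volume 63e98c38-5d45-4f38-ba12-8536414be71d at /dev/sda" and "Updating existing volume attachment record" entries above come from a boot-from-volume build. Below is a minimal sketch of the kind of API request body that produces this path; only the volume UUID and the flavor id are taken from the log, the remaining fields are illustrative placeholders, and this is not the tempest test's own code.

```python
# Minimal sketch (not the tempest test itself): a boot-from-volume server
# create body of the kind behind the "Booting with volume ... at /dev/sda"
# record above. Only the volume UUID and flavorRef come from the log.
import json

server_request = {
    "server": {
        "name": "volume-backed-server",          # illustrative name
        "flavorRef": "42",                        # m1.nano per the flavor logged later
        "networks": "auto",                       # illustrative; needs microversion >= 2.37
        # block_device_mapping_v2 is how the compute API expresses "boot from
        # this Cinder volume"; when no device_name is given, Nova fills one in
        # (the "Using /dev/sd instead of None" get_next_device_name DEBUG line
        # earlier in this section), which ends up as /dev/sda here.
        "block_device_mapping_v2": [{
            "uuid": "63e98c38-5d45-4f38-ba12-8536414be71d",  # volume UUID from the log
            "source_type": "volume",
            "destination_type": "volume",
            "boot_index": 0,
            "delete_on_termination": False,
        }],
    }
}

print(json.dumps(server_request, indent=2))
```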
[ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Traceback (most recent call last): [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.driver.spawn(context, instance, image_meta, [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] vm_ref = self.build_virtual_machine(instance, [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] vif_infos = vmwarevif.get_vif_info(self._session, [ 974.357397] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] for vif in network_info: [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return self._sync_wrapper(fn, *args, **kwargs) [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.wait() [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self[:] = self._gt.wait() [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return self._exit_event.wait() [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] current.throw(*self._exc) [ 974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
974.357963] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] result = function(*args, **kwargs) [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] return func(*args, **kwargs) [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise e [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] nwinfo = self.network_api.allocate_for_instance( [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] created_port_ids = self._update_ports_for_instance( [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] with excutils.save_and_reraise_exception(): [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] self.force_reraise() [ 974.358642] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise self.value [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] updated_port = self._update_port( [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] _ensure_no_port_binding_failure(port) [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] raise exception.PortBindingFailed(port_id=port['id']) [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] nova.exception.PortBindingFailed: Binding failed for 
port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. [ 974.359925] env[62109]: ERROR nova.compute.manager [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] [ 974.359925] env[62109]: DEBUG nova.compute.utils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 974.360628] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.954s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.361370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 7118c84471bf47e4985656fb9f5996f4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.362814] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Build of instance d622ca94-7f5c-47f4-8077-ff37f64eea02 was re-scheduled: Binding failed for port 78639c1d-5491-49ff-8e1b-f3202998b190, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 974.363235] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 974.363456] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquiring lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.363600] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Acquired lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.363755] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 974.364133] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 57185576995540b299536307b57a67b4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.370452] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57185576995540b299536307b57a67b4 [ 974.401894] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7118c84471bf47e4985656fb9f5996f4 [ 974.429117] env[62109]: ERROR nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. 
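Every PortBindingFailed traceback in this section (port 78639c1d above, port 3e6eec16 in the traceback that follows, and port bc6eae64 later) bottoms out in the same check at nova/network/neutron.py:294, _ensure_no_port_binding_failure. A minimal self-contained sketch of that check is shown below, assuming the Neutron port dict carries the standard binding:vif_type attribute; it is not a copy of Nova's code.

```python
# Minimal sketch of the check the tracebacks point at
# (nova/network/neutron.py, _ensure_no_port_binding_failure); not Nova's code.
# Assumes the Neutron port dict exposes the standard binding:vif_type field.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron reports 'binding_failed' when it could not bind the port on the
    # requested host; Nova turns that into PortBindingFailed, which aborts the
    # build and triggers the re-schedule seen in this section.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# The port from the traceback that follows:
failed_port = {'id': '3e6eec16-c463-49ba-b551-ce1647747aad',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 3e6eec16-..., please check neutron logs ...
```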
[ 974.429117] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 974.429117] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 974.429117] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 974.429117] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 974.429117] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 974.429117] env[62109]: ERROR nova.compute.manager raise self.value [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 974.429117] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 974.429117] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 974.429117] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 974.429650] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 974.429650] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 974.429650] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. 
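The save_and_reraise_exception / force_reraise frames visible in both copies of the traceback are oslo.utils' standard pattern for running cleanup inside an except handler while preserving and re-raising the original exception. A small self-contained sketch of that usage follows; the cleanup body and port dicts are illustrative, not Nova's.

```python
# Sketch of the oslo.utils pattern in the traceback frames above
# (excutils.save_and_reraise_exception / force_reraise). The cleanup body is
# illustrative; _update_ports_for_instance uses the same shape to undo
# partially created ports before letting PortBindingFailed propagate.
from oslo_utils import excutils


def update_ports(ports):
    created = []
    try:
        for port in ports:
            if port.get('binding:vif_type') == 'binding_failed':
                raise RuntimeError(f"Binding failed for port {port['id']}")
            created.append(port['id'])
    except Exception:
        # On exit the context manager re-raises the original exception
        # (force_reraise), so cleanup here cannot accidentally swallow it.
        with excutils.save_and_reraise_exception():
            for port_id in created:
                print(f"cleaning up partially created port {port_id}")


try:
    update_ports([{'id': 'ok-port', 'binding:vif_type': 'ovs'},
                  {'id': 'bad-port', 'binding:vif_type': 'binding_failed'}])
except RuntimeError as exc:
    print(f"original error preserved: {exc}")
```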
[ 974.429650] env[62109]: ERROR nova.compute.manager [ 974.429650] env[62109]: Traceback (most recent call last): [ 974.429650] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 974.429650] env[62109]: listener.cb(fileno) [ 974.429650] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 974.429650] env[62109]: result = function(*args, **kwargs) [ 974.429650] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 974.429650] env[62109]: return func(*args, **kwargs) [ 974.429650] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 974.429650] env[62109]: raise e [ 974.429650] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 974.429650] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 974.429650] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 974.429650] env[62109]: created_port_ids = self._update_ports_for_instance( [ 974.429650] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 974.429650] env[62109]: with excutils.save_and_reraise_exception(): [ 974.429650] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 974.429650] env[62109]: self.force_reraise() [ 974.429650] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 974.429650] env[62109]: raise self.value [ 974.429650] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 974.429650] env[62109]: updated_port = self._update_port( [ 974.429650] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 974.429650] env[62109]: _ensure_no_port_binding_failure(port) [ 974.429650] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 974.429650] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 974.430545] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. [ 974.430545] env[62109]: Removing descriptor: 19 [ 974.785776] env[62109]: DEBUG nova.network.neutron [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 974.854881] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 0efb6b2509e5411186d6d1d5596148ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.871002] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0efb6b2509e5411186d6d1d5596148ba [ 974.889496] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 974.907789] env[62109]: DEBUG nova.network.neutron [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.908447] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] Expecting reply to msg 5b700c070c224db992256e6d2a47e597 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.924145] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b700c070c224db992256e6d2a47e597 [ 974.976899] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2d0ddb-1784-43e5-a99d-33fcb9f01418 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.979959] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.980495] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 166f018fbbac4fce8310fa8968828abb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 974.986142] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a559f6de-aab3-4c19-afe2-b21cafc1258f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.990962] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 166f018fbbac4fce8310fa8968828abb [ 975.017794] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c584d59-cc49-40be-a9c7-162a6f96445b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.025789] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba489147-0967-41f6-a1ba-7a0fbec239f7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.038871] env[62109]: DEBUG nova.compute.provider_tree [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.039372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg b7cd0b40020441a4bf966e6297e488a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 975.045922] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg b7cd0b40020441a4bf966e6297e488a5 [ 975.395793] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg d3620c5ef23b4e788fa07d725ef78182 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 975.406478] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3620c5ef23b4e788fa07d725ef78182 [ 975.410647] env[62109]: DEBUG oslo_concurrency.lockutils [req-037f2069-d746-4e79-b5fb-c89b605282ad req-d42f44c5-c680-4f5f-baa2-444a966ce890 service nova] Releasing lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.482372] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Releasing lock "refresh_cache-d622ca94-7f5c-47f4-8077-ff37f64eea02" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.482624] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 975.482808] env[62109]: DEBUG nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 975.482976] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 975.498455] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 975.499061] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg aa0f94c1026348a6b0df1cee83d27be6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 975.505991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa0f94c1026348a6b0df1cee83d27be6 [ 975.541482] env[62109]: DEBUG nova.scheduler.client.report [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 975.543902] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 178f1f46b9cf4aec81e7f085e3eff400 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 975.554609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 178f1f46b9cf4aec81e7f085e3eff400 [ 975.904440] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 03b6d4bade1a4dbb939ef0cf4fbafade in queue reply_7522b64acfeb4981b1f36928b040d568 [ 975.938932] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03b6d4bade1a4dbb939ef0cf4fbafade [ 976.002035] env[62109]: DEBUG nova.network.neutron [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.002528] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5b2f9b4f3dd946b78ef7c1d5a472cb0f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.011521] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b2f9b4f3dd946b78ef7c1d5a472cb0f [ 976.051797] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.692s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.052125] env[62109]: ERROR nova.compute.manager [None 
req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Traceback (most recent call last): [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.driver.spawn(context, instance, image_meta, [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] vm_ref = self.build_virtual_machine(instance, [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] vif_infos = vmwarevif.get_vif_info(self._session, [ 976.052125] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] for vif in network_info: [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return self._sync_wrapper(fn, *args, **kwargs) [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.wait() [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self[:] = self._gt.wait() [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return self._exit_event.wait() [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] current.throw(*self._exc) [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 976.052502] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] result = function(*args, **kwargs) [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] return func(*args, **kwargs) [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise e [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] nwinfo = self.network_api.allocate_for_instance( [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] created_port_ids = self._update_ports_for_instance( [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] with excutils.save_and_reraise_exception(): [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] self.force_reraise() [ 976.052885] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise self.value [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] updated_port = self._update_port( [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] _ensure_no_port_binding_failure(port) [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 
49a0249a-f322-47f6-b723-2af2b701902c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] raise exception.PortBindingFailed(port_id=port['id']) [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] nova.exception.PortBindingFailed: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. [ 976.053293] env[62109]: ERROR nova.compute.manager [instance: 49a0249a-f322-47f6-b723-2af2b701902c] [ 976.053293] env[62109]: DEBUG nova.compute.utils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 976.054048] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.389s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.055812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b71c260d3db742a9a4dbf682ccdb2a06 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.056943] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Build of instance 49a0249a-f322-47f6-b723-2af2b701902c was re-scheduled: Binding failed for port bc6eae64-58f5-45f8-84e4-a333eeaf85e1, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 976.057347] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 976.057566] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquiring lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.057711] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Acquired lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.057868] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 976.058291] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 3c9224d1717344778d5edc71ee6153e9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.069970] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c9224d1717344778d5edc71ee6153e9 [ 976.095671] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b71c260d3db742a9a4dbf682ccdb2a06 [ 976.282932] env[62109]: DEBUG nova.compute.manager [req-f644d584-eab5-427b-adff-a8861ec1054f req-57f4892c-4773-49c9-98cf-90d2dd8366d0 service nova] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Received event network-vif-deleted-3e6eec16-c463-49ba-b551-ce1647747aad {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 976.407675] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 976.408770] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 976.408770] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 976.408770] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.408988] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 976.409116] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.409392] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 976.409711] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 976.409905] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 976.410085] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 976.410250] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 976.410420] env[62109]: DEBUG nova.virt.hardware [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.411282] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4fa1a6-a7e8-4d9c-b1f4-40c1e510e8af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.420073] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2c02fd-73c1-4897-9b7b-75a4f3f7be7a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.433656] env[62109]: ERROR nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. 
[ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Traceback (most recent call last): [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] yield resources [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.driver.spawn(context, instance, image_meta, [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] vm_ref = self.build_virtual_machine(instance, [ 976.433656] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] for vif in network_info: [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return self._sync_wrapper(fn, *args, **kwargs) [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.wait() [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self[:] = self._gt.wait() [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return self._exit_event.wait() [ 976.434107] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 976.434107] env[62109]: ERROR 
nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] current.throw(*self._exc) [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] result = function(*args, **kwargs) [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return func(*args, **kwargs) [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise e [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] nwinfo = self.network_api.allocate_for_instance( [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] created_port_ids = self._update_ports_for_instance( [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] with excutils.save_and_reraise_exception(): [ 976.434518] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.force_reraise() [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise self.value [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] updated_port = self._update_port( [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] _ensure_no_port_binding_failure(port) [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise exception.PortBindingFailed(port_id=port['id']) [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. [ 976.434985] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] [ 976.434985] env[62109]: INFO nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Terminating instance [ 976.436184] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquiring lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.436403] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquired lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.436582] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 976.436976] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 83fb1850ea9846e6bd0d4a0ddb44ebb4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.443176] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83fb1850ea9846e6bd0d4a0ddb44ebb4 [ 976.505450] env[62109]: INFO nova.compute.manager [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] [instance: d622ca94-7f5c-47f4-8077-ff37f64eea02] Took 1.02 seconds to deallocate network for instance. [ 976.506232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 4440283b91d044eca569e68216ee6f5c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.542109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4440283b91d044eca569e68216ee6f5c [ 976.584958] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 976.663961] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84bfdf9-2384-445e-9140-74b9662a65cb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.667048] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.667554] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 16e37333d926488683c95bb341311043 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.673160] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c462b686-b804-4875-a72d-6746c2b588ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.676420] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16e37333d926488683c95bb341311043 [ 976.702650] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7a2b1e-7740-4d73-96b7-e34545740f5d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.709599] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fa3b66-991e-4a33-ba77-a9ed3497c689 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.723513] env[62109]: DEBUG nova.compute.provider_tree [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.724065] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 06f39ee50f874c739512806767313645 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.731600] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06f39ee50f874c739512806767313645 [ 976.873626] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "2e484358-0037-41b0-bf66-534fc7116d34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.873859] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "2e484358-0037-41b0-bf66-534fc7116d34" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.874319] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg e407da01f83b49a9aa0f5ffa8756ac5e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 976.883543] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e407da01f83b49a9aa0f5ffa8756ac5e [ 976.952494] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 977.010632] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 5d1467b6c1794bea8460043454296cd6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.029526] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.030074] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 50e15ed1e5774d8cbb4d6292e3775ff0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.046370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50e15ed1e5774d8cbb4d6292e3775ff0 [ 977.059597] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1467b6c1794bea8460043454296cd6 [ 977.170012] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Releasing lock "refresh_cache-49a0249a-f322-47f6-b723-2af2b701902c" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.170259] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 977.170443] env[62109]: DEBUG nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 977.170610] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.190057] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 977.190631] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg b23a3a2087274791acfdb770c8fb1327 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.200991] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b23a3a2087274791acfdb770c8fb1327 [ 977.226413] env[62109]: DEBUG nova.scheduler.client.report [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.229161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9c1a7cc745fd4e32bee9ab72fba08be1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.239884] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c1a7cc745fd4e32bee9ab72fba08be1 [ 977.376673] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Starting instance... 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 977.378719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 2098587b9ff045c1b30b48d9999a63df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.409899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2098587b9ff045c1b30b48d9999a63df [ 977.532213] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Releasing lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.533176] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 977.533554] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a039eaa4-c60e-491d-885a-43871ef0f09e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.537859] env[62109]: INFO nova.scheduler.client.report [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Deleted allocations for instance d622ca94-7f5c-47f4-8077-ff37f64eea02 [ 977.544852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Expecting reply to msg 84ac66f38ff24701813da5e303ae2e16 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.548139] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b5eb53-e8c9-405c-a9ca-8789da420ec9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.558637] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84ac66f38ff24701813da5e303ae2e16 [ 977.570832] env[62109]: WARNING nova.virt.vmwareapi.driver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 37af2c2e-9c4e-445d-b128-c4c9137e73ca could not be found. 
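Editor's note: the PortBindingFailed traceback above for instance 37af2c2e-9c4e-445d-b128-c4c9137e73ca bottoms out in nova/network/neutron.py, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']) once Neutron hands back a port it could not bind. The sketch below only illustrates that check, under the assumption that Neutron signals the failure through the port's binding:vif_type field; the exception class, constant, and sample values here are stand-ins for the sketch, not the verbatim Nova source.

# Illustrative sketch only -- approximates the check behind the
# PortBindingFailed tracebacks above; not the verbatim Nova source.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron reports when the port could not be bound


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)


def _ensure_no_port_binding_failure(port):
    """Raise if the port dict returned by Neutron reports a failed binding."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict shaped like a Neutron API response after an update
port = {'id': '3e6eec16-c463-49ba-b551-ce1647747aad',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the message seen in the ERROR records above

When the exception propagates out of _build_and_run_instance, the compute manager takes the cleanup path visible in this section: terminate the (never-spawned) instance, release the refresh_cache lock, deallocate networking, and abort the resource claim.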
[ 977.571056] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 977.571336] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4f0ab43-da97-47c5-ab81-66d04b4cb324 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.582481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8c25c7-d0fd-4bbb-812b-7904b4bc8a00 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.613841] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 37af2c2e-9c4e-445d-b128-c4c9137e73ca could not be found. [ 977.614194] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 977.614436] env[62109]: INFO nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Took 0.08 seconds to destroy the instance on the hypervisor. [ 977.614724] env[62109]: DEBUG oslo.service.loopingcall [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.614986] env[62109]: DEBUG nova.compute.manager [-] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 977.615119] env[62109]: DEBUG nova.network.neutron [-] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.634094] env[62109]: DEBUG nova.network.neutron [-] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 977.634615] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5b5fde7ce61d4db8b31d26f24d41bdf9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.640441] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b5fde7ce61d4db8b31d26f24d41bdf9 [ 977.692936] env[62109]: DEBUG nova.network.neutron [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.693539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 37caf2c1d9714faf995a83eb56d346c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.701399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37caf2c1d9714faf995a83eb56d346c4 [ 977.731087] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.677s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.731685] env[62109]: ERROR nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. 
[ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Traceback (most recent call last): [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.driver.spawn(context, instance, image_meta, [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] vm_ref = self.build_virtual_machine(instance, [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] vif_infos = vmwarevif.get_vif_info(self._session, [ 977.731685] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] for vif in network_info: [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return self._sync_wrapper(fn, *args, **kwargs) [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.wait() [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self[:] = self._gt.wait() [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return self._exit_event.wait() [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] current.throw(*self._exc) [ 977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
977.732056] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] result = function(*args, **kwargs) [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] return func(*args, **kwargs) [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise e [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] nwinfo = self.network_api.allocate_for_instance( [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] created_port_ids = self._update_ports_for_instance( [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] with excutils.save_and_reraise_exception(): [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] self.force_reraise() [ 977.732431] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise self.value [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] updated_port = self._update_port( [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] _ensure_no_port_binding_failure(port) [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] raise exception.PortBindingFailed(port_id=port['id']) [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] nova.exception.PortBindingFailed: Binding failed for 
port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. [ 977.732800] env[62109]: ERROR nova.compute.manager [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] [ 977.733107] env[62109]: DEBUG nova.compute.utils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 977.734636] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Build of instance f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54 was re-scheduled: Binding failed for port 886b872c-e114-45cf-8809-f9755f25bde6, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 977.735086] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 977.735349] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.735537] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.735732] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 977.736165] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a43ea929bcd14166990cff71f780a385 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.737282] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.708s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.739077] env[62109]: INFO nova.compute.claims [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.740631] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg c7263c1c23864073810c194fec313744 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 977.742469] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a43ea929bcd14166990cff71f780a385 [ 977.769970] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7263c1c23864073810c194fec313744 [ 977.896263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.059245] env[62109]: DEBUG oslo_concurrency.lockutils [None req-fa20862a-8aee-4891-b61f-862bd7eaa9b5 tempest-ServerDiskConfigTestJSON-32558757 tempest-ServerDiskConfigTestJSON-32558757-project-member] Lock "d622ca94-7f5c-47f4-8077-ff37f64eea02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.621s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.136804] env[62109]: DEBUG nova.network.neutron [-] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.137346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 57fe6d8d1d04474f837b4a77d9740bbc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.145648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57fe6d8d1d04474f837b4a77d9740bbc [ 978.196501] env[62109]: INFO nova.compute.manager [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] [instance: 49a0249a-f322-47f6-b723-2af2b701902c] Took 1.03 seconds to deallocate network for instance. [ 978.198294] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 6b6d0efb62ed41dbb83b056bb1c5f9b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.233148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b6d0efb62ed41dbb83b056bb1c5f9b5 [ 978.244373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 092c82d40c874e5abcf8a09ac8f060c1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.253282] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 978.258277] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 092c82d40c874e5abcf8a09ac8f060c1 [ 978.342822] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.343351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 88d20cee150640e092cfceb0cc745b95 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.351218] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88d20cee150640e092cfceb0cc745b95 [ 978.640402] env[62109]: INFO nova.compute.manager [-] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Took 1.02 seconds to deallocate network for instance. [ 978.703051] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg 75a6f92369004b888d821a106c77508b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.735354] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75a6f92369004b888d821a106c77508b [ 978.836503] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012366bc-2d97-44e8-b5bc-be246f995b50 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.844723] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed27c94-ceeb-4606-8b05-da57c1dd87bc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.847859] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.848131] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 978.848350] env[62109]: DEBUG nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 978.848513] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.876701] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 978.877248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 29fd54d1ec5e404c8239819f286b371d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.881840] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57a3f83-b2c9-4c26-833c-5af27a965767 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.885678] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29fd54d1ec5e404c8239819f286b371d [ 978.886937] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa45c4e-9001-4bd5-b660-1975b70ac0c7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.901675] env[62109]: DEBUG nova.compute.provider_tree [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.902138] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 8b70ec11b79242319e591754e43151b2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 978.908493] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b70ec11b79242319e591754e43151b2 [ 979.196744] env[62109]: INFO nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Took 0.56 seconds to detach 1 volumes for instance. 
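Editor's note: the recurring "Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e ..." records carry the Placement inventory this compute node reports. The capacity the scheduler can hand out per resource class follows from those fields as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A minimal sketch of that arithmetic using the exact numbers from the log (the helper function is ours, purely illustrative, not a Placement API):

# Minimal sketch: derive schedulable capacity from the inventory logged for
# provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e. The helper is illustrative only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    """Capacity available to the scheduler: (total - reserved) * allocation_ratio."""
    return {rc: (fields['total'] - fields['reserved']) * fields['allocation_ratio']
            for rc, fields in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Those totals are what the "Claim successful on node domain-c8..." and "abort_instance_claim" records in this section are debiting and crediting as instances are built and torn down.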
[ 979.198912] env[62109]: DEBUG nova.compute.claims [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 979.199087] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.223629] env[62109]: INFO nova.scheduler.client.report [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Deleted allocations for instance 49a0249a-f322-47f6-b723-2af2b701902c [ 979.239703] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Expecting reply to msg d80a741a0a364cae968ae59f325cbfc4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.245572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d80a741a0a364cae968ae59f325cbfc4 [ 979.382094] env[62109]: DEBUG nova.network.neutron [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.382624] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1e37c76ee4f24db28d1356c650b1b2ce in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.391126] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e37c76ee4f24db28d1356c650b1b2ce [ 979.404172] env[62109]: DEBUG nova.scheduler.client.report [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.406562] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 1f93f9634ccf44ba8b8b7114170ac7bf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.420251] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f93f9634ccf44ba8b8b7114170ac7bf [ 979.732428] env[62109]: DEBUG 
oslo_concurrency.lockutils [None req-c78553e6-1311-4294-a4ff-f2ae1769ca0a tempest-ServersTestFqdnHostnames-1289654912 tempest-ServersTestFqdnHostnames-1289654912-project-member] Lock "49a0249a-f322-47f6-b723-2af2b701902c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.283s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.885433] env[62109]: INFO nova.compute.manager [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54] Took 1.04 seconds to deallocate network for instance. [ 979.887221] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 61a789d314ff4d6b952eaca2d03374d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.910515] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.911062] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 979.912834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9b8c515dbe68477992a0d3a76fe948d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.913808] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.527s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.915265] env[62109]: INFO nova.compute.claims [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.917082] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg bee316629ef3426d981597c364d702ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 979.946343] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61a789d314ff4d6b952eaca2d03374d4 [ 979.946893] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b8c515dbe68477992a0d3a76fe948d0 [ 979.957794] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg bee316629ef3426d981597c364d702ec [ 980.392194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 49a923cce4ec4caea6973d0332c45d4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 980.420419] env[62109]: DEBUG nova.compute.utils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.421098] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 4c336a5912b74fbf854ffe34aa42b55b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 980.423063] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg dcf80e7ae15049eb97d2b2890eb47039 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 980.423829] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 980.423985] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 980.429042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49a923cce4ec4caea6973d0332c45d4a [ 980.434801] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcf80e7ae15049eb97d2b2890eb47039 [ 980.437141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c336a5912b74fbf854ffe34aa42b55b [ 980.484089] env[62109]: DEBUG nova.policy [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 980.844124] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Successfully created port: d6902f58-e935-4bf3-9383-425e88f02b8a {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.955102] env[62109]: DEBUG 
nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 980.955102] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9ace1dbabede486fa853b484628a45f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 980.962895] env[62109]: INFO nova.scheduler.client.report [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54 [ 980.976026] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b80751ee6cf340d39d96a946a59c4ffe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 980.976026] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ace1dbabede486fa853b484628a45f9 [ 980.987151] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b80751ee6cf340d39d96a946a59c4ffe [ 981.058018] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26b0748-b6b7-4675-bb53-ddc6a86ade2e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.066034] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c29b88-cb3c-4ea3-bad8-3b1e63d59686 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.096861] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70820a1-50e3-4939-8e43-012c111251cf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.104309] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9172c994-4804-4fc0-b273-82b61a06511a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.117861] env[62109]: DEBUG nova.compute.provider_tree [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.118414] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 9f52b661378746df88fcf21b0eb8de19 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 981.125622] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f52b661378746df88fcf21b0eb8de19 [ 981.439888] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 
tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2ea6c1ab1bcf4b1a9ffbe70eb65cb55f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 981.471547] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9c125533-59f7-41a5-9da8-a2dad34bc0fb tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "f9cb0f9d-fcc7-4241-b6db-ce1cf8f3ed54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.478s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.476361] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ea6c1ab1bcf4b1a9ffbe70eb65cb55f [ 981.620977] env[62109]: DEBUG nova.scheduler.client.report [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.623364] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 177a852b141746138116a4f6d2922712 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 981.637685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 177a852b141746138116a4f6d2922712 [ 981.943494] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 981.971965] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.972237] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.972394] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.972572] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.972714] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.972858] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.973062] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.973225] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.973383] env[62109]: DEBUG nova.virt.hardware [None 
req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.973539] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.973710] env[62109]: DEBUG nova.virt.hardware [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.974582] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3b600b-a651-4232-a0f2-bbd84a02a08a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.984251] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4637b421-bd9a-4d68-bd5d-ce000a34c22f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.109351] env[62109]: DEBUG nova.compute.manager [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Received event network-changed-d6902f58-e935-4bf3-9383-425e88f02b8a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 982.109544] env[62109]: DEBUG nova.compute.manager [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Refreshing instance network info cache due to event network-changed-d6902f58-e935-4bf3-9383-425e88f02b8a. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 982.110143] env[62109]: DEBUG oslo_concurrency.lockutils [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] Acquiring lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.110143] env[62109]: DEBUG oslo_concurrency.lockutils [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] Acquired lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.110143] env[62109]: DEBUG nova.network.neutron [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Refreshing network info cache for port d6902f58-e935-4bf3-9383-425e88f02b8a {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.110476] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] Expecting reply to msg e9ed05198cd44bd996504f57a6aabc96 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.117785] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9ed05198cd44bd996504f57a6aabc96 [ 982.126202] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.126684] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 982.128564] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 6010b85fda0648aea68ecd84f035677f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.129775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.657s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.131199] env[62109]: INFO nova.compute.claims [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.132803] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 688fc9b6b82f4b448bf9abd73f026edd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.164903] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6010b85fda0648aea68ecd84f035677f [ 982.172149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 688fc9b6b82f4b448bf9abd73f026edd [ 982.441028] env[62109]: ERROR nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. 
[ 982.441028] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.441028] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 982.441028] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 982.441028] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.441028] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.441028] env[62109]: ERROR nova.compute.manager raise self.value [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 982.441028] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 982.441028] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.441028] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 982.441858] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 982.441858] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 982.441858] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. 
[ 982.441858] env[62109]: ERROR nova.compute.manager [ 982.441858] env[62109]: Traceback (most recent call last): [ 982.441858] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 982.441858] env[62109]: listener.cb(fileno) [ 982.441858] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 982.441858] env[62109]: result = function(*args, **kwargs) [ 982.441858] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 982.441858] env[62109]: return func(*args, **kwargs) [ 982.441858] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 982.441858] env[62109]: raise e [ 982.441858] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.441858] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 982.441858] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 982.441858] env[62109]: created_port_ids = self._update_ports_for_instance( [ 982.441858] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 982.441858] env[62109]: with excutils.save_and_reraise_exception(): [ 982.441858] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.441858] env[62109]: self.force_reraise() [ 982.441858] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.441858] env[62109]: raise self.value [ 982.441858] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 982.441858] env[62109]: updated_port = self._update_port( [ 982.441858] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.441858] env[62109]: _ensure_no_port_binding_failure(port) [ 982.441858] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 982.441858] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 982.443159] env[62109]: nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. [ 982.443159] env[62109]: Removing descriptor: 16 [ 982.443159] env[62109]: ERROR nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. 
[ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Traceback (most recent call last): [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] yield resources [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.driver.spawn(context, instance, image_meta, [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 982.443159] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] vm_ref = self.build_virtual_machine(instance, [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] vif_infos = vmwarevif.get_vif_info(self._session, [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] for vif in network_info: [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self._sync_wrapper(fn, *args, **kwargs) [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.wait() [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self[:] = self._gt.wait() [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self._exit_event.wait() [ 982.443753] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 982.444448] env[62109]: ERROR 
nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] result = hub.switch() [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self.greenlet.switch() [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] result = function(*args, **kwargs) [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return func(*args, **kwargs) [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise e [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] nwinfo = self.network_api.allocate_for_instance( [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 982.444448] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] created_port_ids = self._update_ports_for_instance( [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] with excutils.save_and_reraise_exception(): [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.force_reraise() [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise self.value [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] updated_port = self._update_port( [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.445133] 
env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] _ensure_no_port_binding_failure(port) [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 982.445133] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise exception.PortBindingFailed(port_id=port['id']) [ 982.445704] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. [ 982.445704] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] [ 982.445704] env[62109]: INFO nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Terminating instance [ 982.445704] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.635124] env[62109]: DEBUG nova.network.neutron [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 982.638196] env[62109]: DEBUG nova.compute.utils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.638812] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 1a9bca9edadd445993047fd60bd08936 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.641012] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 690679ce2513467f8403abe8ee6078bd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.642159] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Not allocating networking since 'none' was specified. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 982.650876] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a9bca9edadd445993047fd60bd08936 [ 982.653617] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 690679ce2513467f8403abe8ee6078bd [ 982.797765] env[62109]: DEBUG nova.network.neutron [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.798293] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] Expecting reply to msg ae2da71743c64dc5af8ba7df74bfeeb0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 982.806985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae2da71743c64dc5af8ba7df74bfeeb0 [ 983.143418] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 983.145161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 63c90b5a8aa34ded9fe219a60b8b952c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.199179] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63c90b5a8aa34ded9fe219a60b8b952c [ 983.269243] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cc9b4a-2cc3-4568-8562-804592d520b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.283207] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71917120-0c2f-42c2-bc42-caf3ca89fd24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.323314] env[62109]: DEBUG oslo_concurrency.lockutils [req-d4d9f81a-6fbc-45a2-b81e-b59877041726 req-4d9effef-f5a8-40db-a6fe-ec597a8071b5 service nova] Releasing lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.325958] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.325958] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.326484] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ba7c64f6474e4a2295e10abfc6604d0e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.329400] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a85490-5450-4b76-81af-b9238cd4c581 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.333900] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.334115] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.334544] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 18ea1a19b8de4d77a70727775442c645 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.335370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba7c64f6474e4a2295e10abfc6604d0e [ 983.342129] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1c48d7-63eb-43ec-853f-f473b25a7b1a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.346399] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18ea1a19b8de4d77a70727775442c645 [ 983.357849] env[62109]: DEBUG nova.compute.provider_tree [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.358344] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 0fce58a6c86e4f278be5367650cf398d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.367378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fce58a6c86e4f278be5367650cf398d [ 983.656041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 2c31c8011e194ae0aa9708727330fd18 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.690121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
2c31c8011e194ae0aa9708727330fd18 [ 983.837081] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 983.838824] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 6824412df12d4a51aa8d4a0159c9621d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.856317] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 983.860575] env[62109]: DEBUG nova.scheduler.client.report [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.863240] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 191b2b549e194d498c441e9892da9a7d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.885538] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 191b2b549e194d498c441e9892da9a7d [ 983.890508] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6824412df12d4a51aa8d4a0159c9621d [ 983.942934] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.943462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg de453f7d26754564a0e259054ef02c83 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 983.953979] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de453f7d26754564a0e259054ef02c83 [ 984.035078] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquiring lock "681f4b00-1ed8-47fb-9117-aa3745096e66" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.035436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "681f4b00-1ed8-47fb-9117-aa3745096e66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.036099] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 57d992f30f8649ae848e5c5c02f64e64 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.045149] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57d992f30f8649ae848e5c5c02f64e64 [ 984.137781] env[62109]: DEBUG nova.compute.manager [req-d30774d6-408a-4eb1-8a2b-a1aeed1d8859 req-a7426184-bda8-4c11-8a23-14d5a8669fe2 service nova] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Received event network-vif-deleted-d6902f58-e935-4bf3-9383-425e88f02b8a {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 984.155294] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 984.180232] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 984.180506] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 984.181105] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.181105] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 984.181395] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.181395] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 984.181792] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 984.181792] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 984.182087] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 
tempest-ServerShowV257Test-219043525-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 984.182087] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 984.182501] env[62109]: DEBUG nova.virt.hardware [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 984.183664] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1070a472-ffc4-45de-b662-cfc7e81fe3d8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.192947] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f72433-ce53-43b7-a940-dec889fae154 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.208960] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.214670] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Creating folder: Project (f580a63417834f22aae91b47303194c0). Parent ref: group-v108864. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 984.215060] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1886052-bb08-49bb-ab64-06d677b34ed0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.230257] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Created folder: Project (f580a63417834f22aae91b47303194c0) in parent group-v108864. [ 984.230478] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Creating folder: Instances. Parent ref: group-v108893. {{(pid=62109) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 984.230749] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7dae8aa-cca7-4d40-8885-64ef887bf58e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.242757] env[62109]: INFO nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Created folder: Instances in parent group-v108893. 
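The nova.virt.hardware entries above (flavor and image limits of 0:0:0, "Got 1 possible topologies", "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") come from Nova enumerating every sockets/cores/threads split of the flavor's vCPU count. The sketch below is illustrative only: possible_topologies() is a hypothetical helper that approximates the trivial single-vCPU case seen here for the m1.nano flavor, not the real logic in nova/virt/hardware.py (which also applies flavor/image preferences and NUMA constraints).

# Illustrative sketch, not the Nova implementation: enumerate CPU topologies
# whose sockets * cores * threads equals the flavor's vCPU count.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) combination that multiplies to vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

# For a 1-vCPU flavor like m1.nano there is exactly one candidate, which is why
# the log reports a single possible/desired topology of 1 socket, 1 core, 1 thread.
print(list(possible_topologies(1)))

With larger vCPU counts the same enumeration produces several candidates, which the real code then orders by how well they match the preferred topology before picking one.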
[ 984.243071] env[62109]: DEBUG oslo.service.loopingcall [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.243307] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.243549] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5d3d1ef-6f94-4b05-a065-053dcd5dd419 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.266694] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.266694] env[62109]: value = "task-401559" [ 984.266694] env[62109]: _type = "Task" [ 984.266694] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.277081] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401559, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.367180] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.368161] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 984.370799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg c747a47f728d45c9bc7145ef43e0aa15 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.373236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.373656] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.477s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.376270] env[62109]: INFO nova.compute.claims [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.378862] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 8914122facfa4ffdbf0e38982cde838e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.414467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c747a47f728d45c9bc7145ef43e0aa15 [ 984.415890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8914122facfa4ffdbf0e38982cde838e [ 984.446375] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.446787] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 984.446988] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 984.447399] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df708cc2-e303-40b2-8a5e-5983dd0a82db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.457188] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.457573] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.458324] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 0fa5ba9901a14b89ab0e8fdae6813cf9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.461885] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc851b3c-a6cb-470c-9650-a393f3f221b7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.475862] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fa5ba9901a14b89ab0e8fdae6813cf9 [ 984.492857] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 138f2594-adbe-4ce2-a395-40fae312981b could not be found. [ 984.493487] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 984.494036] env[62109]: INFO nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 984.494498] env[62109]: DEBUG oslo.service.loopingcall [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.495323] env[62109]: DEBUG nova.compute.manager [-] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 984.495558] env[62109]: DEBUG nova.network.neutron [-] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 984.517526] env[62109]: DEBUG nova.network.neutron [-] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.518207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg abcf8aa9013a4793bf43dbd6a528b8b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.529161] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abcf8aa9013a4793bf43dbd6a528b8b0 [ 984.537984] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 984.539891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg e726ffcf7c3c47719437940893f17414 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.573083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e726ffcf7c3c47719437940893f17414 [ 984.777931] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401559, 'name': CreateVM_Task, 'duration_secs': 0.291484} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.778127] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 984.778568] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.778713] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.779068] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.779313] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-427c60d9-18f8-4d99-9644-731ead1c89f8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.784611] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 984.784611] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]520d524e-a2af-df8b-2ca4-e7a34ccb66a3" [ 984.784611] env[62109]: _type = "Task" [ 984.784611] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.793595] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]520d524e-a2af-df8b-2ca4-e7a34ccb66a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.882704] env[62109]: DEBUG nova.compute.utils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.883368] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 6cf5e6f953e741958914f2cf6fa7a6ac in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.885558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 44542ae01cf64e2184610c70be36e232 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.886555] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 984.886716] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 984.901510] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44542ae01cf64e2184610c70be36e232 [ 984.905292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cf5e6f953e741958914f2cf6fa7a6ac [ 984.950163] env[62109]: DEBUG nova.policy [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d514402d01f44c59eabf4ae0b14103c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0340ebdce94ded876d6eb2769afbcc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 984.979069] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.979351] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Starting heal instance info cache {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 984.979548] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Rebuilding the list of instances to heal {{(pid=62109) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9930}} [ 984.980334] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 2e178c61cbbc4bdd8ad8ab0f1714426d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 984.995890] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e178c61cbbc4bdd8ad8ab0f1714426d [ 985.020678] env[62109]: DEBUG nova.network.neutron [-] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.021351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fe123fe516414037bf2764162b08aad8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 985.030816] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe123fe516414037bf2764162b08aad8 [ 985.061075] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.230911] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Successfully created port: 9f70bcb1-71b1-4cc5-8744-c080b2082474 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.296773] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]520d524e-a2af-df8b-2ca4-e7a34ccb66a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.297070] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.297302] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.297532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.297674] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.297848] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.298180] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6a70a5f-9ae5-4ac5-b81b-5f892bd320e1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.307190] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.307389] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 985.308337] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-187c568d-fd96-47df-9b6f-3f9896188841 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.314587] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 985.314587] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52161c83-3ede-181f-16a9-f1b2e7a155b9" [ 985.314587] env[62109]: _type = "Task" [ 985.314587] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.322656] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52161c83-3ede-181f-16a9-f1b2e7a155b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.387492] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 985.389729] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 787d1790419041609dcfbd11cd33d96d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 985.437840] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 787d1790419041609dcfbd11cd33d96d [ 985.484182] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 985.484343] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 985.484473] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 985.484591] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Skipping network cache update for instance because it is Building. 
{{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 985.484754] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Skipping network cache update for instance because it is Building. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 985.484932] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Didn't find any instances for network info cache update. {{(pid=62109) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 985.487484] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.487899] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.488111] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.488247] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.488403] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.488592] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.488703] env[62109]: DEBUG nova.compute.manager [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62109) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 985.488831] env[62109]: DEBUG oslo_service.periodic_task [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62109) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.489186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 31f12a72d68d40c2a5fc0b2b5719d1f9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 985.497939] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b4115a-0ba3-4aa9-9587-dbd0725619bd {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.500997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31f12a72d68d40c2a5fc0b2b5719d1f9 [ 985.507310] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7687b3ca-370e-419f-9c67-4e7bcd348721 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.538411] env[62109]: INFO nova.compute.manager [-] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Took 1.04 seconds to deallocate network for instance. [ 985.540995] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891ec56d-ff91-41c1-9b70-660a648774e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.543842] env[62109]: DEBUG nova.compute.claims [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 985.544034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.550192] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6276c9cf-5223-4c95-a078-fe0cedc1b342 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.564954] env[62109]: DEBUG nova.compute.provider_tree [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.565557] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 190611ff5254499291bab762cf07f6a9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 985.573387] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
190611ff5254499291bab762cf07f6a9 [ 985.826736] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52161c83-3ede-181f-16a9-f1b2e7a155b9, 'name': SearchDatastore_Task, 'duration_secs': 0.011} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.827518] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b03af6a-cab0-4da4-a138-f70e2f4cdfc4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.834029] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 985.834029] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5233f0a7-de2f-0751-0b4a-5bfbfdf08186" [ 985.834029] env[62109]: _type = "Task" [ 985.834029] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.842181] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5233f0a7-de2f-0751-0b4a-5bfbfdf08186, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.896261] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 991aafd4feeb416a869d6f994bf8b251 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 985.929827] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 991aafd4feeb416a869d6f994bf8b251 [ 985.991956] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.068250] env[62109]: DEBUG nova.scheduler.client.report [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.071024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg da661dfa787a4609b7b22fb12cc5a48d in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 986.084028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da661dfa787a4609b7b22fb12cc5a48d [ 986.102325] env[62109]: ERROR nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. [ 986.102325] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 986.102325] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 986.102325] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 986.102325] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 986.102325] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 986.102325] env[62109]: ERROR nova.compute.manager raise self.value [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 986.102325] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 986.102325] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 986.102325] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 986.102916] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 986.102916] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 986.102916] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. 
[ 986.102916] env[62109]: ERROR nova.compute.manager [ 986.102916] env[62109]: Traceback (most recent call last): [ 986.102916] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 986.102916] env[62109]: listener.cb(fileno) [ 986.102916] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 986.102916] env[62109]: result = function(*args, **kwargs) [ 986.102916] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 986.102916] env[62109]: return func(*args, **kwargs) [ 986.102916] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 986.102916] env[62109]: raise e [ 986.102916] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 986.102916] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 986.102916] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 986.102916] env[62109]: created_port_ids = self._update_ports_for_instance( [ 986.102916] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 986.102916] env[62109]: with excutils.save_and_reraise_exception(): [ 986.102916] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 986.102916] env[62109]: self.force_reraise() [ 986.102916] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 986.102916] env[62109]: raise self.value [ 986.102916] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 986.102916] env[62109]: updated_port = self._update_port( [ 986.102916] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 986.102916] env[62109]: _ensure_no_port_binding_failure(port) [ 986.102916] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 986.102916] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 986.103841] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. [ 986.103841] env[62109]: Removing descriptor: 16 [ 986.162947] env[62109]: DEBUG nova.compute.manager [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Received event network-changed-9f70bcb1-71b1-4cc5-8744-c080b2082474 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 986.163066] env[62109]: DEBUG nova.compute.manager [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Refreshing instance network info cache due to event network-changed-9f70bcb1-71b1-4cc5-8744-c080b2082474. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 986.163316] env[62109]: DEBUG oslo_concurrency.lockutils [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] Acquiring lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.163399] env[62109]: DEBUG oslo_concurrency.lockutils [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] Acquired lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.163552] env[62109]: DEBUG nova.network.neutron [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Refreshing network info cache for port 9f70bcb1-71b1-4cc5-8744-c080b2082474 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.163966] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] Expecting reply to msg e5798073af654dadbaca896add187d3a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 986.170202] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5798073af654dadbaca896add187d3a [ 986.345387] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5233f0a7-de2f-0751-0b4a-5bfbfdf08186, 'name': SearchDatastore_Task, 'duration_secs': 0.009187} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.345756] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.345849] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 986.346152] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9595df40-1dd8-44ff-9f8f-dc5a89506c13 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.352711] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 986.352711] env[62109]: value = "task-401560" [ 986.352711] env[62109]: _type = "Task" [ 986.352711] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.361735] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.399091] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 986.426143] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.426410] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.426582] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.426842] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.427026] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.427201] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.427433] 
env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.427613] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.427796] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.427984] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.428180] env[62109]: DEBUG nova.virt.hardware [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.429389] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b842d15-8fb5-4687-8342-cedc7c7fe576 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.438219] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb51d51-1dba-46f8-882d-95ac7feb8331 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.452646] env[62109]: ERROR nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. 
[ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Traceback (most recent call last): [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] yield resources [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.driver.spawn(context, instance, image_meta, [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] vm_ref = self.build_virtual_machine(instance, [ 986.452646] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] vif_infos = vmwarevif.get_vif_info(self._session, [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] for vif in network_info: [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return self._sync_wrapper(fn, *args, **kwargs) [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.wait() [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self[:] = self._gt.wait() [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return self._exit_event.wait() [ 986.453122] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 986.453122] env[62109]: ERROR 
nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] current.throw(*self._exc) [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] result = function(*args, **kwargs) [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return func(*args, **kwargs) [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise e [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] nwinfo = self.network_api.allocate_for_instance( [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] created_port_ids = self._update_ports_for_instance( [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] with excutils.save_and_reraise_exception(): [ 986.453609] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.force_reraise() [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise self.value [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] updated_port = self._update_port( [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] _ensure_no_port_binding_failure(port) [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise exception.PortBindingFailed(port_id=port['id']) [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. [ 986.454038] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] [ 986.454038] env[62109]: INFO nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Terminating instance [ 986.455006] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquiring lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.574455] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.575234] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 986.578213] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 7d81c18d92874037ba3471369787c3f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 986.579834] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.381s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.582790] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg c5ce9aa5c9234d3ca368dd921a8a8607 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 986.628104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d81c18d92874037ba3471369787c3f0 [ 986.630595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5ce9aa5c9234d3ca368dd921a8a8607 [ 986.682498] env[62109]: DEBUG nova.network.neutron [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 986.768124] env[62109]: DEBUG nova.network.neutron [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.768568] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] Expecting reply to msg 813d0da68e9c499c860f8f5c752379fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 986.776746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 813d0da68e9c499c860f8f5c752379fc [ 986.862376] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433716} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.862641] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 986.862851] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.863098] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83c9f1ad-4d78-45b0-b47b-8baec14b4a21 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.870520] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 986.870520] env[62109]: value = "task-401561" [ 986.870520] env[62109]: _type = "Task" [ 986.870520] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.878243] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401561, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.931798] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3289e237a37746a78825b6aa5799f778 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 986.941449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3289e237a37746a78825b6aa5799f778 [ 987.086022] env[62109]: DEBUG nova.compute.utils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.086668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 91bf35289eb946dd99f424323e341877 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.091103] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.091330] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 987.096028] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91bf35289eb946dd99f424323e341877 [ 987.132870] env[62109]: DEBUG nova.policy [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2883d8b8ab764050a13c8b3a56318c34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98934316e57a4ea69b2bb5a2f2aaf251', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 987.186735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c0d6ff-966f-4f45-850e-429a6f2a4b7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.195163] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a6f12d-dcb9-49ef-9e8f-b9c82d3d135f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.231695] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128299c8-32dd-4649-823b-c70d7618dba9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.240263] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b700be-0f83-457c-a91b-ad5c74d96f37 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.254518] env[62109]: DEBUG nova.compute.provider_tree [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.255121] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 27405821fd76442e91e6793e598dff3c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.262584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27405821fd76442e91e6793e598dff3c [ 987.271081] env[62109]: DEBUG oslo_concurrency.lockutils [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] Releasing lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.271399] env[62109]: DEBUG nova.compute.manager [req-7aee4874-4ed4-4e83-a77d-a69075a93f4b req-6d17a66f-e8bd-4522-9ebf-53425cad1c6b service nova] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Received event network-vif-deleted-9f70bcb1-71b1-4cc5-8744-c080b2082474 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 987.272034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquired lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.272264] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 987.272799] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg c2823798676949e58303f6b9314e7ce1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.278983] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2823798676949e58303f6b9314e7ce1 [ 987.381623] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069771} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.381973] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.382853] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d30d945-66a9-448c-8579-e76f801b36b5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.402628] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.402934] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-971225fe-fcd8-4f4b-965c-e02b2ae6324f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.423803] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 987.423803] env[62109]: value = "task-401562" [ 987.423803] env[62109]: _type = "Task" [ 987.423803] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.432168] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401562, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.437052] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Successfully created port: 347cd73f-5ec1-45b9-9ed9-a242e25415ac {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.591560] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 987.593556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 74a89e0193234812a247baab5cfa8a4f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.625312] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74a89e0193234812a247baab5cfa8a4f [ 987.757886] env[62109]: DEBUG nova.scheduler.client.report [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.760350] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg d0e822eade294fabb65ea75de79982a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.771985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0e822eade294fabb65ea75de79982a5 [ 987.789702] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 987.860206] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.860783] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 28e4a148882d4a4e94268330a3702d0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 987.876365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28e4a148882d4a4e94268330a3702d0b [ 987.935666] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401562, 'name': ReconfigVM_Task, 'duration_secs': 0.265725} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.935951] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Reconfigured VM instance instance-0000005d to attach disk [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.936694] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93a6cd3e-2768-4e98-91d1-3dd8fdc0cf7c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.943863] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 987.943863] env[62109]: value = "task-401563" [ 987.943863] env[62109]: _type = "Task" [ 987.943863] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.953147] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401563, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.099446] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c9ba0a3075ec444b9ebae1289a9a751f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.135762] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9ba0a3075ec444b9ebae1289a9a751f [ 988.220751] env[62109]: DEBUG nova.compute.manager [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Received event network-changed-347cd73f-5ec1-45b9-9ed9-a242e25415ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 988.220751] env[62109]: DEBUG nova.compute.manager [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Refreshing instance network info cache due to event network-changed-347cd73f-5ec1-45b9-9ed9-a242e25415ac. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 988.220751] env[62109]: DEBUG oslo_concurrency.lockutils [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] Acquiring lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.220751] env[62109]: DEBUG oslo_concurrency.lockutils [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] Acquired lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.220751] env[62109]: DEBUG nova.network.neutron [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Refreshing network info cache for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 988.220964] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] Expecting reply to msg 808b0e6c22c24caaabc4896f7ddf82d6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.222377] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 808b0e6c22c24caaabc4896f7ddf82d6 [ 988.264028] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.683s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.264028] env[62109]: ERROR nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. 
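The build failure above and the traceback that follows both end in the same check in nova/network/neutron.py: after updating a port, Nova inspects the port's binding:vif_type and raises PortBindingFailed when Neutron reports the binding as failed. The sketch below is an illustrative, self-contained approximation of that check, not Nova's actual code; the exception class and the sample port dict are simplified stand-ins.

    # Simplified sketch of the check the traceback below ends in
    # (_ensure_no_port_binding_failure); Nova's real version raises
    # nova.exception.PortBindingFailed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with
        # binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Hypothetical port payload shaped like Neutron's show-port response:
    port = {'id': '3e6eec16-c463-49ba-b551-ce1647747aad',
            'binding:vif_type': 'binding_failed'}
    ensure_no_port_binding_failure(port)  # raises PortBindingFailed
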
[ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Traceback (most recent call last): [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.driver.spawn(context, instance, image_meta, [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 988.264028] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] vm_ref = self.build_virtual_machine(instance, [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] for vif in network_info: [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return self._sync_wrapper(fn, *args, **kwargs) [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.wait() [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self[:] = self._gt.wait() [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return self._exit_event.wait() [ 988.264667] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] current.throw(*self._exc) [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] result = function(*args, **kwargs) [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] return func(*args, **kwargs) [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise e [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] nwinfo = self.network_api.allocate_for_instance( [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] created_port_ids = self._update_ports_for_instance( [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 988.265089] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] with excutils.save_and_reraise_exception(): [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] self.force_reraise() [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise self.value [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] updated_port = self._update_port( [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] _ensure_no_port_binding_failure(port) [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] raise exception.PortBindingFailed(port_id=port['id']) [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] nova.exception.PortBindingFailed: Binding failed for 
port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. [ 988.265508] env[62109]: ERROR nova.compute.manager [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] [ 988.267451] env[62109]: DEBUG nova.compute.utils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 988.268185] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Build of instance 37af2c2e-9c4e-445d-b128-c4c9137e73ca was re-scheduled: Binding failed for port 3e6eec16-c463-49ba-b551-ce1647747aad, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 988.269151] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 988.269499] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquiring lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.269763] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Acquired lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.270159] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 988.270709] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg c915a14b97214fe181a5b951fe8ba76a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.272113] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.899s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.278173] env[62109]: INFO nova.compute.claims [None req-338b145e-959f-4ce0-811b-0dcff2db841b 
tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.280092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 927a5c66b27e4c9c920f978d04ca6368 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.282959] env[62109]: ERROR nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. [ 988.282959] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 988.282959] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 988.282959] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 988.282959] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 988.282959] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 988.282959] env[62109]: ERROR nova.compute.manager raise self.value [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 988.282959] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 988.282959] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 988.282959] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 988.283491] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 988.283491] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 988.283491] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. 
[ 988.283491] env[62109]: ERROR nova.compute.manager [ 988.283889] env[62109]: Traceback (most recent call last): [ 988.283985] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 988.283985] env[62109]: listener.cb(fileno) [ 988.284079] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 988.284079] env[62109]: result = function(*args, **kwargs) [ 988.284161] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 988.284161] env[62109]: return func(*args, **kwargs) [ 988.284233] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 988.284233] env[62109]: raise e [ 988.284306] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 988.284306] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 988.284440] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 988.284440] env[62109]: created_port_ids = self._update_ports_for_instance( [ 988.284521] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 988.284521] env[62109]: with excutils.save_and_reraise_exception(): [ 988.284597] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 988.284597] env[62109]: self.force_reraise() [ 988.284731] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 988.284731] env[62109]: raise self.value [ 988.284808] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 988.284808] env[62109]: updated_port = self._update_port( [ 988.284883] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 988.284883] env[62109]: _ensure_no_port_binding_failure(port) [ 988.284958] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 988.284958] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 988.285040] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. [ 988.285098] env[62109]: Removing descriptor: 16 [ 988.285607] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c915a14b97214fe181a5b951fe8ba76a [ 988.321314] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 927a5c66b27e4c9c920f978d04ca6368 [ 988.367948] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Releasing lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.368742] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 988.369057] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 988.369466] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1474229c-e494-4b90-a124-8f51ed6de1e5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.379491] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020d6b28-f7fa-4de7-966f-10ac4af40450 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.403350] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 121dbfda-87d9-4733-a7d2-3ffa6f54df36 could not be found. [ 988.404043] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 988.404353] env[62109]: INFO nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Took 0.04 seconds to destroy the instance on the hypervisor. [ 988.404728] env[62109]: DEBUG oslo.service.loopingcall [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.405076] env[62109]: DEBUG nova.compute.manager [-] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 988.405383] env[62109]: DEBUG nova.network.neutron [-] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 988.421419] env[62109]: DEBUG nova.network.neutron [-] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 988.422049] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c42b010c23241d49de1cacb297dde83 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.429068] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c42b010c23241d49de1cacb297dde83 [ 988.455013] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401563, 'name': Rename_Task, 'duration_secs': 0.137399} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.455511] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 988.455896] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59e1b1a0-fa7d-4b66-ac75-2669096893f9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.463385] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 988.463385] env[62109]: value = "task-401564" [ 988.463385] env[62109]: _type = "Task" [ 988.463385] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.479036] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.603062] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 988.627425] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 988.627663] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 988.627814] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.627989] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 988.628256] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.628413] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 988.628623] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 988.628775] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 988.628939] env[62109]: DEBUG 
nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 988.629124] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 988.629303] env[62109]: DEBUG nova.virt.hardware [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 988.630188] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28137d01-bf73-4d4b-9422-40cd780a61ec {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.639155] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53287f7-147a-478d-ae80-d0d7536c1671 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.653284] env[62109]: ERROR nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. 
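This spawn failure surfaces inside vif.get_vif_info because the Neutron allocation was kicked off asynchronously by _allocate_network_async; the exception is only re-raised once the driver iterates network_info and the wrapper's wait() joins the green thread, which is why the traceback below runs through model.py and eventlet's greenthread.wait(). A rough sketch of that deferred-error pattern with eventlet follows; allocate_networks is a hypothetical stand-in for the Neutron call, not Nova's implementation.

    import eventlet

    def allocate_networks():
        # Hypothetical stand-in for network_api.allocate_for_instance(),
        # which in this log raises PortBindingFailed for port 347cd73f-....
        raise RuntimeError("Binding failed for port 347cd73f-...")

    # Start allocation early so it overlaps with other build steps.
    gt = eventlet.spawn(allocate_networks)

    # Later, when the virt driver actually needs the VIF list, waiting on the
    # green thread re-raises whatever the worker raised.
    try:
        network_info = gt.wait()
    except RuntimeError as exc:
        print(f"spawn aborted: {exc}")
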
[ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Traceback (most recent call last): [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] yield resources [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.driver.spawn(context, instance, image_meta, [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] vm_ref = self.build_virtual_machine(instance, [ 988.653284] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] vif_infos = vmwarevif.get_vif_info(self._session, [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] for vif in network_info: [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return self._sync_wrapper(fn, *args, **kwargs) [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.wait() [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self[:] = self._gt.wait() [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return self._exit_event.wait() [ 988.653745] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 988.653745] env[62109]: ERROR 
nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] current.throw(*self._exc) [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] result = function(*args, **kwargs) [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return func(*args, **kwargs) [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise e [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] nwinfo = self.network_api.allocate_for_instance( [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] created_port_ids = self._update_ports_for_instance( [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] with excutils.save_and_reraise_exception(): [ 988.654153] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.force_reraise() [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise self.value [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] updated_port = self._update_port( [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] _ensure_no_port_binding_failure(port) [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise exception.PortBindingFailed(port_id=port['id']) [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. [ 988.654582] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] [ 988.654582] env[62109]: INFO nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Terminating instance [ 988.655742] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.735614] env[62109]: DEBUG nova.network.neutron [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 988.783685] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg d8b2e2ba81024718ac8c4af7190e556c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.789455] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 988.791892] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8b2e2ba81024718ac8c4af7190e556c [ 988.822688] env[62109]: DEBUG nova.network.neutron [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.823214] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] Expecting reply to msg f7a175feb3a6469b930d52a75775b79b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.831207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7a175feb3a6469b930d52a75775b79b [ 988.862566] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.863091] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 2fb7ebe56e1f4a409af8a5fd04120ac1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.870963] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fb7ebe56e1f4a409af8a5fd04120ac1 [ 988.923823] env[62109]: DEBUG nova.network.neutron [-] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.924272] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cd097360393948218fa6dd41e89be081 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 988.932206] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd097360393948218fa6dd41e89be081 [ 988.975010] env[62109]: DEBUG oslo_vmware.api [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401564, 'name': PowerOnVM_Task, 'duration_secs': 0.481125} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.975294] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 988.975495] env[62109]: INFO nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Took 4.82 seconds to spawn the instance on the hypervisor. 
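The successful build of b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 above is driven through oslo.vmware: each CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task is submitted with invoke_api and then polled, which produces the recurring "progress is N%" entries. Below is a hedged sketch of that call pattern, assuming reachable vCenter credentials and an already-resolved VM managed object reference (all placeholders here); it is not an exact copy of the Nova driver code.

    from oslo_vmware import api as vmware_api

    def power_on(host, user, password, vm_ref):
        """Submit PowerOnVM_Task and block until vCenter reports it done."""
        # Connection parameters are illustrative placeholders.
        session = vmware_api.VMwareAPISession(
            host, user, password, api_retry_count=3, task_poll_interval=0.5)
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task object, yielding the
        # "Task: {'id': task-4015xx, ...} progress is N%" entries seen above.
        session.wait_for_task(task)
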
[ 988.975682] env[62109]: DEBUG nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 988.976735] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be023aa-ba47-42b1-8c6a-ca1de1672930 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.984231] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg b20d0c6c824640f6a62df0949033040f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.013928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b20d0c6c824640f6a62df0949033040f [ 989.327880] env[62109]: DEBUG oslo_concurrency.lockutils [req-b2b363a8-fcb7-480a-ad88-f690d112c696 req-347a1514-b16d-4b6b-9f92-562ae1c0e78b service nova] Releasing lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.328383] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.328563] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.329001] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 6eacc66f752544c1a9a74a136a75060e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.338024] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6eacc66f752544c1a9a74a136a75060e [ 989.366925] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Releasing lock "refresh_cache-37af2c2e-9c4e-445d-b128-c4c9137e73ca" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.366925] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 989.366925] env[62109]: DEBUG nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 989.366925] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 989.380143] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 989.380857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 1ca73f47a61549f28ef08ce45ca68802 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.389040] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ca73f47a61549f28ef08ce45ca68802 [ 989.394499] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c139c5e8-58f9-47bd-a293-79179d2c5ffe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.402881] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423f1102-89d3-4d4f-969f-36fedacc8a07 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.433163] env[62109]: INFO nova.compute.manager [-] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Took 1.03 seconds to deallocate network for instance. 
[ 989.435814] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6181fea0-5b94-4013-b653-c4a681042f5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.439055] env[62109]: DEBUG nova.compute.claims [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 989.439370] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.445130] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0836ba81-e6c8-4ec7-92eb-b4a509740385 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.458925] env[62109]: DEBUG nova.compute.provider_tree [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.459548] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 3be71c81cf9f4545a266b21b19d53657 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.468669] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3be71c81cf9f4545a266b21b19d53657 [ 989.491358] env[62109]: INFO nova.compute.manager [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Took 22.12 seconds to build instance. 
[ 989.492501] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 4526cb8ab4814c6aabb98997c3e145c7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.505104] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4526cb8ab4814c6aabb98997c3e145c7 [ 989.542965] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 019357ef838647afacaa74f801cc0425 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 989.556409] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 019357ef838647afacaa74f801cc0425 [ 990.423436] env[62109]: DEBUG nova.network.neutron [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.424221] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg f922bffa9c2e4e5db64c6906a3025249 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.425693] env[62109]: DEBUG nova.scheduler.client.report [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.428479] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a058be6c7746465abf6415da6a79a630 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.429607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-435e61e1-1cc4-4f2c-a279-d678d754622b tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.066s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.430244] env[62109]: INFO nova.compute.manager [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Rebuilding instance [ 990.433187] env[62109]: DEBUG nova.compute.manager [req-262135e4-3ca7-4216-87de-b9bd5fb5e3d8 req-54ecafab-8ddb-46af-92e0-f74a841fad95 service nova] [instance: 
2e484358-0037-41b0-bf66-534fc7116d34] Received event network-vif-deleted-347cd73f-5ec1-45b9-9ed9-a242e25415ac {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 990.450088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a058be6c7746465abf6415da6a79a630 [ 990.462724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f922bffa9c2e4e5db64c6906a3025249 [ 990.465692] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 990.480841] env[62109]: DEBUG nova.compute.manager [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.482044] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6041e5e-dd49-4fd9-bd34-0c6c2f51d103 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.490726] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 4fdd729e50214834b696716252121036 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.521874] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fdd729e50214834b696716252121036 [ 990.563818] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.564539] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c204b4ef8b824bd6b756d08ee47c5f99 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.574973] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c204b4ef8b824bd6b756d08ee47c5f99 [ 990.936054] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.936223] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 990.937898] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg e4e27a3b667445519a77d8681448739f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.939101] env[62109]: INFO nova.compute.manager [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] [instance: 37af2c2e-9c4e-445d-b128-c4c9137e73ca] Took 1.57 seconds to deallocate network for instance. [ 990.940692] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg dab0f20a8aa04bb689f1ca559a136d03 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.941737] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.881s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.943048] env[62109]: INFO nova.compute.claims [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.944501] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 430336396ac54c289ca6b7e0032406f3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 990.974861] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dab0f20a8aa04bb689f1ca559a136d03 [ 990.984698] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 430336396ac54c289ca6b7e0032406f3 [ 990.991595] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4e27a3b667445519a77d8681448739f [ 990.993944] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 990.994411] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52a63395-90ba-4449-97bb-ace7816d0af0 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.002760] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 991.002760] env[62109]: value = "task-401565" [ 991.002760] env[62109]: _type = "Task" [ 991.002760] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.011639] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.068536] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.068905] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.069092] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.069381] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e6afc84-6e78-458a-a25f-9e09dc28bc11 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.079071] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9815ea-b763-4f09-93a3-9bb08e47ccbb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.103160] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e484358-0037-41b0-bf66-534fc7116d34 could not be found. [ 991.103433] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.103621] env[62109]: INFO nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Took 0.03 seconds to destroy the instance on the hypervisor. [ 991.103859] env[62109]: DEBUG oslo.service.loopingcall [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.104086] env[62109]: DEBUG nova.compute.manager [-] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 991.104182] env[62109]: DEBUG nova.network.neutron [-] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.119595] env[62109]: DEBUG nova.network.neutron [-] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.120141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e719ac2dfddd4780865417755cd523d8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.127423] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e719ac2dfddd4780865417755cd523d8 [ 991.443427] env[62109]: DEBUG nova.compute.utils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 991.444207] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg b8c33017f2b94c1cace61dfa6610aa73 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.445689] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 991.445689] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 991.450506] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg aaea9de5f7e6447e9ceedb100edcb506 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.452751] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 62e4a0b12f684566b995cfad3417ae0b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.459083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8c33017f2b94c1cace61dfa6610aa73 [ 991.459999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62e4a0b12f684566b995cfad3417ae0b [ 991.479435] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaea9de5f7e6447e9ceedb100edcb506 [ 991.490006] env[62109]: DEBUG nova.policy [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 991.512137] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401565, 'name': PowerOffVM_Task, 'duration_secs': 0.191701} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.512650] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 991.512928] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.513657] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6c5942-b87c-45a8-ae8e-7c6161227c48 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.520486] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 991.520701] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f08756d6-84e8-48b4-83b2-976917d5b498 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.548967] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 991.549243] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 991.549463] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Deleting the datastore file [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.549773] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60c8bbb4-8c92-48bf-b65c-888490bb4ed8 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.557125] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 991.557125] env[62109]: value = "task-401567" [ 991.557125] env[62109]: _type = "Task" [ 991.557125] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.565671] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.621795] env[62109]: DEBUG nova.network.neutron [-] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.622365] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e0cd0fe063f04347bc52fee8e38e4078 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.631194] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0cd0fe063f04347bc52fee8e38e4078 [ 991.750507] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Successfully created port: 9a962537-f015-4cde-9316-5978f15b14e0 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.949243] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 991.950956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 2c05b53bbc9b43cc91bb982ff0114458 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.974616] env[62109]: INFO nova.scheduler.client.report [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Deleted allocations for instance 37af2c2e-9c4e-445d-b128-c4c9137e73ca [ 991.982999] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 tempest-ServerActionsV293TestJSON-1520280389-project-member] Expecting reply to msg 5e151f25e74f47c6aec9e63bcfc09e02 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 991.990205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c05b53bbc9b43cc91bb982ff0114458 [ 992.002043] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e151f25e74f47c6aec9e63bcfc09e02 [ 992.045407] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51f539b-8f56-4c5b-b24b-f2eaf7fd73c3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.053281] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79241fc8-29f3-4139-9877-4d6d3350290a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.084388] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f617ed59-9763-4ded-bb0c-4623e8530464 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.089394] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096343} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.089936] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.090119] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.090292] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.091808] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg a44568d0655a4547a13f07db2329312f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.095541] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4181823f-60c0-44d9-bd48-bdf15b581c59 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.109720] env[62109]: DEBUG nova.compute.provider_tree [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.110193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg f0573e7fcbd84802850f3f799f4c13f1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.117271] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0573e7fcbd84802850f3f799f4c13f1 [ 992.120819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a44568d0655a4547a13f07db2329312f [ 992.124628] env[62109]: INFO nova.compute.manager [-] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Took 1.02 seconds to deallocate network for instance. 
[ 992.126798] env[62109]: DEBUG nova.compute.claims [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 992.126966] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.413379] env[62109]: DEBUG nova.compute.manager [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Received event network-changed-9a962537-f015-4cde-9316-5978f15b14e0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 992.413578] env[62109]: DEBUG nova.compute.manager [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Refreshing instance network info cache due to event network-changed-9a962537-f015-4cde-9316-5978f15b14e0. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 992.413812] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] Acquiring lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.414020] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] Acquired lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.414402] env[62109]: DEBUG nova.network.neutron [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Refreshing network info cache for port 9a962537-f015-4cde-9316-5978f15b14e0 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 992.414719] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] Expecting reply to msg 2a5e587337d64be58975bebd45a85e94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.421825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a5e587337d64be58975bebd45a85e94 [ 992.455413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 70f5f008a18346639ed9568a241b6344 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.484032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70f5f008a18346639ed9568a241b6344 [ 992.491355] env[62109]: DEBUG oslo_concurrency.lockutils [None req-ca8558f0-4a71-434d-b657-9aaba1e21650 tempest-ServerActionsV293TestJSON-1520280389 
tempest-ServerActionsV293TestJSON-1520280389-project-member] Lock "37af2c2e-9c4e-445d-b128-c4c9137e73ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.774s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.597489] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg b8d061a756ed4e53b67157f3db40eaf0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.611695] env[62109]: ERROR nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. [ 992.611695] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.611695] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.611695] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.611695] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.611695] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.611695] env[62109]: ERROR nova.compute.manager raise self.value [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.611695] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 992.611695] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.611695] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 992.612340] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.612340] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 992.612340] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. 
[ 992.612340] env[62109]: ERROR nova.compute.manager [ 992.612340] env[62109]: Traceback (most recent call last): [ 992.612340] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 992.612340] env[62109]: listener.cb(fileno) [ 992.612340] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 992.612340] env[62109]: result = function(*args, **kwargs) [ 992.612340] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 992.612340] env[62109]: return func(*args, **kwargs) [ 992.612340] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 992.612340] env[62109]: raise e [ 992.612340] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.612340] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 992.612340] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.612340] env[62109]: created_port_ids = self._update_ports_for_instance( [ 992.612340] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.612340] env[62109]: with excutils.save_and_reraise_exception(): [ 992.612340] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.612340] env[62109]: self.force_reraise() [ 992.612340] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.612340] env[62109]: raise self.value [ 992.612340] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.612340] env[62109]: updated_port = self._update_port( [ 992.612340] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.612340] env[62109]: _ensure_no_port_binding_failure(port) [ 992.612340] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.612340] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 992.613259] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. 
[ 992.613259] env[62109]: Removing descriptor: 19 [ 992.613259] env[62109]: DEBUG nova.scheduler.client.report [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.615189] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 9cf5eff6b66b438595cd1346d6aa500d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 992.635660] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cf5eff6b66b438595cd1346d6aa500d [ 992.636360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8d061a756ed4e53b67157f3db40eaf0 [ 992.930592] env[62109]: DEBUG nova.network.neutron [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 992.957990] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 992.983096] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.983396] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.983590] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.983813] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.983992] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.984193] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.984435] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.984628] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.984831] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] 
Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.985028] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.985241] env[62109]: DEBUG nova.virt.hardware [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.986111] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54395dda-fcae-40c3-a18c-7094ee960de3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.994418] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdee7db-81fd-44f5-b8fc-250c9e99444e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.008358] env[62109]: ERROR nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Traceback (most recent call last): [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] yield resources [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.driver.spawn(context, instance, image_meta, [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] vm_ref = self.build_virtual_machine(instance, [ 993.008358] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 993.008946] env[62109]: ERROR 
nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] for vif in network_info: [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return self._sync_wrapper(fn, *args, **kwargs) [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.wait() [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self[:] = self._gt.wait() [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return self._exit_event.wait() [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 993.008946] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] current.throw(*self._exc) [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] result = function(*args, **kwargs) [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return func(*args, **kwargs) [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise e [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] nwinfo = self.network_api.allocate_for_instance( [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] created_port_ids = self._update_ports_for_instance( [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] with excutils.save_and_reraise_exception(): [ 993.009376] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.force_reraise() [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise self.value [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] updated_port = self._update_port( [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] _ensure_no_port_binding_failure(port) [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise exception.PortBindingFailed(port_id=port['id']) [ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. 
[ 993.009767] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] [ 993.009767] env[62109]: INFO nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Terminating instance [ 993.011458] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.016775] env[62109]: DEBUG nova.network.neutron [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.017263] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] Expecting reply to msg 51d76f61bb6f4d7b96f7512b6f3e69f2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.026756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51d76f61bb6f4d7b96f7512b6f3e69f2 [ 993.117218] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.117662] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 993.119342] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 68dd251791be449693f288b50430fdd2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.122659] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 993.122877] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 993.123033] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.123206] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 993.123346] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.123484] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 993.123677] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 993.123828] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 
tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 993.123986] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 993.124155] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 993.124322] env[62109]: DEBUG nova.virt.hardware [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.124614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.581s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.126332] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 94a80c3fc57742f592646bc14bff2f0f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.127792] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e706fe-3342-4dc1-bd51-7dc6dc29dcfe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.136670] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376553b6-fd25-47fd-ad1d-2592cfaf5a1d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.153266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance VIF info [] {{(pid=62109) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.158404] env[62109]: DEBUG oslo.service.loopingcall [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.159063] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68dd251791be449693f288b50430fdd2 [ 993.159474] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Creating VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 993.159692] env[62109]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f23021d-8d95-46cf-ae91-c1f4c5bd3daa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.171714] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a80c3fc57742f592646bc14bff2f0f [ 993.178775] env[62109]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.178775] env[62109]: value = "task-401569" [ 993.178775] env[62109]: _type = "Task" [ 993.178775] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.186843] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401569, 'name': CreateVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.519132] env[62109]: DEBUG oslo_concurrency.lockutils [req-4f413225-42f1-4bca-89ec-2ede67aee829 req-23c7d2e2-fa43-4edf-9251-1ef60ad65f2d service nova] Releasing lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.519546] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.519722] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.520182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 8aab676cbeaa432db68cca65c8768aef in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.529651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aab676cbeaa432db68cca65c8768aef [ 993.635427] env[62109]: DEBUG nova.compute.utils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 993.636053] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 3b4bbbca71f04622a2ca35ca06d4f467 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.637189] env[62109]: DEBUG 
nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 993.637339] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 993.648375] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b4bbbca71f04622a2ca35ca06d4f467 [ 993.676513] env[62109]: DEBUG nova.policy [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '299130133b5546bdb3dec4e7203533db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f97e7c0258f74b54aeed6c19db7c8cfe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 993.696702] env[62109]: DEBUG oslo_vmware.api [-] Task: {'id': task-401569, 'name': CreateVM_Task, 'duration_secs': 0.279042} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.696892] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Created VM on the ESX host {{(pid=62109) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 993.697317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.697476] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.697792] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.698069] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19be1b70-0ecc-4d5c-9de6-5401dd53058a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.705909] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 993.705909] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]522d2b03-f511-ad6b-a61f-fc2471231335" [ 993.705909] env[62109]: _type = "Task" [ 993.705909] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.715791] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]522d2b03-f511-ad6b-a61f-fc2471231335, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.820216] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7d1f5f-8fab-4fdf-9c80-b640b7997915 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.828027] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e77f677-80c0-4963-ad7e-2db391bf3f63 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.866382] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba6cf1c-acbc-4621-828d-890a9c29f94b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.874396] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc578f95-02a0-46e5-bf5c-34cf213018fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.897471] env[62109]: DEBUG nova.compute.provider_tree [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.898042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 0e5aef480df74adeae757412a9bf055c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 993.904666] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e5aef480df74adeae757412a9bf055c [ 993.927276] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Successfully created port: 6ffa751a-5526-4526-875d-e0756fac6200 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 994.040417] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.140510] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 994.142230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 6fd4dcaa8f9a4446a093509a32c684fd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.183385] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fd4dcaa8f9a4446a093509a32c684fd [ 994.200630] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.201184] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg d6f33ec2d3914634bdf0d5ae787f8106 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.210869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6f33ec2d3914634bdf0d5ae787f8106 [ 994.218155] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]522d2b03-f511-ad6b-a61f-fc2471231335, 'name': SearchDatastore_Task, 'duration_secs': 0.012602} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.218441] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.218662] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Processing image 4800b6ec-9841-4c82-b42e-97cce3beeec5 {{(pid=62109) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.218877] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.219010] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.219173] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.219420] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ce3ca64-0a89-49c5-ac99-f83ca260323f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.230780] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62109) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.231025] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62109) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 994.232033] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5223c6c5-137f-4733-bc88-ab22bd71bcfe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.238983] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 994.238983] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]52678e67-94ca-e7bb-df37-557cd6d16bf4" [ 994.238983] env[62109]: _type = "Task" [ 994.238983] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.249690] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52678e67-94ca-e7bb-df37-557cd6d16bf4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.402290] env[62109]: DEBUG nova.scheduler.client.report [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.405015] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg b7f3e76900c74265a1d119407c892424 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.416531] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7f3e76900c74265a1d119407c892424 [ 994.446637] env[62109]: DEBUG nova.compute.manager [req-6f53c3bb-1c03-41d0-906a-16f4aa59c9c6 req-d4bf12d9-f969-42d6-a078-c1c19891d5b8 service nova] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Received event network-vif-deleted-9a962537-f015-4cde-9316-5978f15b14e0 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 994.648928] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 861d5632b90b47eba3083583fdfd42ee in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.703279] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.703732] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 994.703926] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 994.704621] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861d5632b90b47eba3083583fdfd42ee [ 994.705209] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a4a2cc2-a824-4855-ba24-7bb597784097 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.715813] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f129551-409d-4e7c-ad19-57c8c9cb477d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.739902] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf could not be found. [ 994.740157] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 994.740343] env[62109]: INFO nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 994.740601] env[62109]: DEBUG oslo.service.loopingcall [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.743706] env[62109]: DEBUG nova.compute.manager [-] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 994.743807] env[62109]: DEBUG nova.network.neutron [-] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 994.751348] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]52678e67-94ca-e7bb-df37-557cd6d16bf4, 'name': SearchDatastore_Task, 'duration_secs': 0.009383} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.752083] env[62109]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1250668e-6916-4c10-ad34-a7b33d9efae9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.761537] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 994.761537] env[62109]: value = "session[52179c02-c015-3130-00ae-1f82359b65ea]5267a39b-01a2-6dc8-0ba4-8e2dd87daae4" [ 994.761537] env[62109]: _type = "Task" [ 994.761537] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.766822] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5267a39b-01a2-6dc8-0ba4-8e2dd87daae4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.767724] env[62109]: DEBUG nova.network.neutron [-] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.768411] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dbc5bd3aa49b4498beccdd668687be82 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.775857] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbc5bd3aa49b4498beccdd668687be82 [ 994.908166] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.783s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.908868] env[62109]: ERROR nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. 
[ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Traceback (most recent call last): [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.driver.spawn(context, instance, image_meta, [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] vm_ref = self.build_virtual_machine(instance, [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] vif_infos = vmwarevif.get_vif_info(self._session, [ 994.908868] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] for vif in network_info: [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self._sync_wrapper(fn, *args, **kwargs) [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.wait() [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self[:] = self._gt.wait() [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self._exit_event.wait() [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] result = hub.switch() [ 994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
994.909346] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return self.greenlet.switch() [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] result = function(*args, **kwargs) [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] return func(*args, **kwargs) [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise e [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] nwinfo = self.network_api.allocate_for_instance( [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] created_port_ids = self._update_ports_for_instance( [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] with excutils.save_and_reraise_exception(): [ 994.909836] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] self.force_reraise() [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise self.value [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] updated_port = self._update_port( [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] _ensure_no_port_binding_failure(port) [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] raise exception.PortBindingFailed(port_id=port['id']) [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] nova.exception.PortBindingFailed: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. [ 994.910318] env[62109]: ERROR nova.compute.manager [instance: 138f2594-adbe-4ce2-a395-40fae312981b] [ 994.911020] env[62109]: DEBUG nova.compute.utils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 994.911020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.919s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.911020] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.911159] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62109) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 994.911840] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.472s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.913887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg a200461e05ba4b71813990f515f5f2d7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.915158] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Build of instance 138f2594-adbe-4ce2-a395-40fae312981b was re-scheduled: Binding failed for port d6902f58-e935-4bf3-9383-425e88f02b8a, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 994.915648] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 994.915908] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.916101] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.916310] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 994.916755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg c51fc361f1a24f5bac5bbb78c6fc3b94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 994.918053] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6ef169-c7a9-4758-af29-2bdd39742ea9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.923163] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c51fc361f1a24f5bac5bbb78c6fc3b94 [ 994.927754] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b170bf-3e0c-459d-9b7d-5911ccc94707 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.947415] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bbebb8-ca13-48df-a115-50cb9b6798fb {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.951258] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a200461e05ba4b71813990f515f5f2d7 [ 994.955298] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b82c458-b491-44ea-b89a-5bd67674b996 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.991080] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181545MB free_disk=124GB free_vcpus=48 pci_devices=None {{(pid=62109) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 994.991285] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.992510] env[62109]: ERROR nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. [ 994.992510] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 994.992510] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 994.992510] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 994.992510] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 994.992510] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 994.992510] env[62109]: ERROR nova.compute.manager raise self.value [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 994.992510] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 994.992510] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 994.992510] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 994.993300] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 994.993300] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 994.993300] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. 
[ 994.993300] env[62109]: ERROR nova.compute.manager [ 994.993300] env[62109]: Traceback (most recent call last): [ 994.993300] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 994.993300] env[62109]: listener.cb(fileno) [ 994.993300] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 994.993300] env[62109]: result = function(*args, **kwargs) [ 994.993300] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 994.993300] env[62109]: return func(*args, **kwargs) [ 994.993300] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 994.993300] env[62109]: raise e [ 994.993300] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 994.993300] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 994.993300] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 994.993300] env[62109]: created_port_ids = self._update_ports_for_instance( [ 994.993300] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 994.993300] env[62109]: with excutils.save_and_reraise_exception(): [ 994.993300] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 994.993300] env[62109]: self.force_reraise() [ 994.993300] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 994.993300] env[62109]: raise self.value [ 994.993300] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 994.993300] env[62109]: updated_port = self._update_port( [ 994.993300] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 994.993300] env[62109]: _ensure_no_port_binding_failure(port) [ 994.993300] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 994.993300] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 994.994572] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. [ 994.994572] env[62109]: Removing descriptor: 19 [ 995.155784] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 995.183238] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 995.183539] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 995.183732] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.183952] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 995.184185] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.184369] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 995.184616] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 995.184810] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 995.185012] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 995.185210] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 995.185417] env[62109]: DEBUG nova.virt.hardware [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 995.186332] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bae372-3fa6-4b7b-87a2-2ad5a04bc138 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.194878] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ef90ff-a8e3-430c-9991-60dcedee2f29 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.208307] env[62109]: ERROR nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. 
[ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Traceback (most recent call last): [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] yield resources [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.driver.spawn(context, instance, image_meta, [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] vm_ref = self.build_virtual_machine(instance, [ 995.208307] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] vif_infos = vmwarevif.get_vif_info(self._session, [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] for vif in network_info: [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return self._sync_wrapper(fn, *args, **kwargs) [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.wait() [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self[:] = self._gt.wait() [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return self._exit_event.wait() [ 995.208735] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 995.208735] env[62109]: ERROR 
nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] current.throw(*self._exc) [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] result = function(*args, **kwargs) [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return func(*args, **kwargs) [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise e [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] nwinfo = self.network_api.allocate_for_instance( [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] created_port_ids = self._update_ports_for_instance( [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] with excutils.save_and_reraise_exception(): [ 995.209214] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.force_reraise() [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise self.value [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] updated_port = self._update_port( [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] _ensure_no_port_binding_failure(port) [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise exception.PortBindingFailed(port_id=port['id']) [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. [ 995.209664] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] [ 995.209664] env[62109]: INFO nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Terminating instance [ 995.211169] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquiring lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.211367] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquired lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.211567] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 995.212014] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 067a278a384a496985644855ef37fafc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 995.218395] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 067a278a384a496985644855ef37fafc [ 995.269089] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': session[52179c02-c015-3130-00ae-1f82359b65ea]5267a39b-01a2-6dc8-0ba4-8e2dd87daae4, 'name': SearchDatastore_Task, 'duration_secs': 0.010123} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.269502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.269791] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 995.270116] env[62109]: DEBUG nova.network.neutron [-] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.270588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e429ff31a0a6447a98278f1e0f732960 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 995.271361] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f8d8765-4f5e-4a00-aef9-494e69da2e8f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.278991] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 995.278991] env[62109]: value = "task-401570" [ 995.278991] env[62109]: _type = "Task" [ 995.278991] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.282560] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e429ff31a0a6447a98278f1e0f732960 [ 995.287887] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.442722] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 995.530201] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18332ea7-a8e9-4f66-98bd-cbc043b51dc2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.540502] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.541020] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 38b5657e3dc141f98f58db564a735a5d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 995.543251] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a47e230-2f61-4661-bf7f-72f22219d66c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.550154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38b5657e3dc141f98f58db564a735a5d [ 995.575521] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e329ab0-886c-4f06-8c1d-0c5ad5a41de1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.586308] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de68538a-a891-4cf4-b11f-a5fa00152ce2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.603115] env[62109]: DEBUG nova.compute.provider_tree [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.603627] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg a57c4a37b1564dbebe0827b89ac82ebf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 995.611454] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a57c4a37b1564dbebe0827b89ac82ebf [ 995.740614] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 995.774545] env[62109]: INFO nova.compute.manager [-] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Took 1.03 seconds to deallocate network for instance. 
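Editor's note on the failure recorded above: the traceback for instance 681f4b00-1ed8-47fb-9117-aa3745096e66 bottoms out in _ensure_no_port_binding_failure(port) inside nova/network/neutron.py, which inspects the port dict returned by Neutron and raises PortBindingFailed when Neutron could not bind the port. The sketch below is a simplified, self-contained illustration of what that guard amounts to, not Nova's verbatim source; the exception class and constant here are stand-ins, and only the 'binding_failed' value and the error wording are taken from the log itself.

# Minimal sketch (not Nova's verbatim code) of the guard that produces
# "Binding failed for port ..." in the traceback above. Neutron marks a port it
# could not bind with binding:vif_type = 'binding_failed'; Nova turns that into
# PortBindingFailed, which aborts the spawn and triggers the terminate /
# deallocate-network cleanup seen in the surrounding records.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports on bind failure


class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs '
            'for more information.' % port_id)


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reported that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

In this run the port 6ffa751a-5526-4526-875d-e0756fac6200 came back with a failed binding, so the spawn is abandoned before any VM is created on vCenter, which is why the later destroy path logs InstanceNotFound.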
[ 995.776920] env[62109]: DEBUG nova.compute.claims [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 995.777363] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.789991] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476606} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.792290] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4800b6ec-9841-4c82-b42e-97cce3beeec5/4800b6ec-9841-4c82-b42e-97cce3beeec5.vmdk to [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk {{(pid=62109) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 995.792499] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Extending root virtual disk to 1048576 {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.792749] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49a5fd8c-92fe-49da-8062-3d81baa31823 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.800206] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 995.800206] env[62109]: value = "task-401571" [ 995.800206] env[62109]: _type = "Task" [ 995.800206] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.810038] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401571, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.837521] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.838310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 1392b2f21df74b53b5bfd611eed5135c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 995.848277] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1392b2f21df74b53b5bfd611eed5135c [ 996.047446] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-138f2594-adbe-4ce2-a395-40fae312981b" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.047784] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 996.047982] env[62109]: DEBUG nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 996.048391] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 996.062628] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.063181] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 6eae110b25684b6d8ca68c9561b4e868 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.070508] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6eae110b25684b6d8ca68c9561b4e868 [ 996.106639] env[62109]: DEBUG nova.scheduler.client.report [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.109075] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 3ec2af3284bd41d7b8562126f6a14899 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.119422] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ec2af3284bd41d7b8562126f6a14899 [ 996.310816] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064938} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.311072] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Extended root virtual disk {{(pid=62109) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.311814] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6815e563-9a69-42ef-a0bf-106d2e1ef69c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.331808] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.332053] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5630ce5c-d563-459a-95c4-a734e923607b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.345798] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Releasing lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.346202] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 996.346465] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 996.346703] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a48b98a5-80b4-4aeb-b79e-5994a798ca93 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.354973] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc50d6c6-d902-49cd-b2a6-f460012d5497 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.365755] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 996.365755] env[62109]: value = "task-401572" [ 996.365755] env[62109]: _type = "Task" [ 996.365755] env[62109]: } to complete. 
{{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.374317] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.379305] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 681f4b00-1ed8-47fb-9117-aa3745096e66 could not be found. [ 996.379506] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 996.379680] env[62109]: INFO nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Took 0.03 seconds to destroy the instance on the hypervisor. [ 996.379924] env[62109]: DEBUG oslo.service.loopingcall [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.380162] env[62109]: DEBUG nova.compute.manager [-] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 996.380254] env[62109]: DEBUG nova.network.neutron [-] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 996.397797] env[62109]: DEBUG nova.network.neutron [-] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance cache missing network info.
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 996.398370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e2c48adf9e524e9dad0d4a3f2577f864 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.406083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2c48adf9e524e9dad0d4a3f2577f864 [ 996.474246] env[62109]: DEBUG nova.compute.manager [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Received event network-changed-6ffa751a-5526-4526-875d-e0756fac6200 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 996.474461] env[62109]: DEBUG nova.compute.manager [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Refreshing instance network info cache due to event network-changed-6ffa751a-5526-4526-875d-e0756fac6200. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 996.474719] env[62109]: DEBUG oslo_concurrency.lockutils [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] Acquiring lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.474885] env[62109]: DEBUG oslo_concurrency.lockutils [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] Acquired lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.475078] env[62109]: DEBUG nova.network.neutron [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Refreshing network info cache for port 6ffa751a-5526-4526-875d-e0756fac6200 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 996.475565] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] Expecting reply to msg 003a0ecb1b0c4a08b739e4f7bf7675f7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.482085] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 003a0ecb1b0c4a08b739e4f7bf7675f7 [ 996.565656] env[62109]: DEBUG nova.network.neutron [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.566230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 4c636b58fb07402fb74dc96d608c577c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.575232] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c636b58fb07402fb74dc96d608c577c [ 996.611962] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 
tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.700s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.612625] env[62109]: ERROR nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Traceback (most recent call last): [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.driver.spawn(context, instance, image_meta, [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self._vmops.spawn(context, instance, image_meta, injected_files, [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] vm_ref = self.build_virtual_machine(instance, [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] vif_infos = vmwarevif.get_vif_info(self._session, [ 996.612625] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] for vif in network_info: [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return self._sync_wrapper(fn, *args, **kwargs) [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.wait() [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self[:] = self._gt.wait() [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return self._exit_event.wait() [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] current.throw(*self._exc) [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 996.613047] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] result = function(*args, **kwargs) [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] return func(*args, **kwargs) [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise e [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] nwinfo = self.network_api.allocate_for_instance( [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] created_port_ids = self._update_ports_for_instance( [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] with excutils.save_and_reraise_exception(): [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] self.force_reraise() [ 996.613468] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise self.value [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] updated_port = self._update_port( [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 
121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] _ensure_no_port_binding_failure(port) [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] raise exception.PortBindingFailed(port_id=port['id']) [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] nova.exception.PortBindingFailed: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. [ 996.614318] env[62109]: ERROR nova.compute.manager [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] [ 996.614318] env[62109]: DEBUG nova.compute.utils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 996.614607] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.487s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.616529] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg b50ccf63e449474c83ccfaa3719ab1b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.623894] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Build of instance 121dbfda-87d9-4733-a7d2-3ffa6f54df36 was re-scheduled: Binding failed for port 9f70bcb1-71b1-4cc5-8744-c080b2082474, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 996.623894] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 996.623894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquiring lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.623894] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Acquired lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.624148] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.624148] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 888192b6403c40ff93e9ea5b280728fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.629649] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 888192b6403c40ff93e9ea5b280728fc [ 996.650617] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b50ccf63e449474c83ccfaa3719ab1b5 [ 996.876387] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401572, 'name': ReconfigVM_Task, 'duration_secs': 0.287111} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.876686] env[62109]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Reconfigured VM instance instance-0000005d to attach disk [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308/b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308.vmdk or device None with type sparse {{(pid=62109) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.877073] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54ef4c88-0c79-47ac-a445-7cd79ffd73fc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.883452] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 996.883452] env[62109]: value = "task-401573" [ 996.883452] env[62109]: _type = "Task" [ 996.883452] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.890300] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401573, 'name': Rename_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.900672] env[62109]: DEBUG nova.network.neutron [-] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.901135] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9d5223c169cd458daba07e4c5cb0abec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 996.910869] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d5223c169cd458daba07e4c5cb0abec [ 996.994711] env[62109]: DEBUG nova.network.neutron [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.068903] env[62109]: INFO nova.compute.manager [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 138f2594-adbe-4ce2-a395-40fae312981b] Took 1.02 seconds to deallocate network for instance. 
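Editor's note on the vCenter task records for b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308: every CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task entry above follows the same oslo.vmware pattern — invoke a *_Task method, receive a task reference (task-401570, task-401571, ...), then let wait_for_task poll it, which is what the repeated "_poll_task ... progress is N%" / "completed successfully" lines are. The sketch below shows that pattern under stated assumptions: the host, credentials and VMDK paths are placeholders, and the real driver goes through nova.virt.vmwareapi helpers rather than calling oslo.vmware directly like this.

# Sketch of the invoke-then-poll pattern visible above (placeholder host,
# credentials and paths; only the *_Task method names come from the log).
from oslo_vmware import api as vmware_api

# host, username, password, API retry count, task poll interval (seconds)
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user@example.org', 'secret', 10, 0.5)

def copy_and_extend(dc_ref, src_vmdk, dst_vmdk, size_in_kb):
    vim = session.vim
    # CopyVirtualDisk_Task: copy the cached image VMDK into the instance folder.
    copy_task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task', vim.service_content.virtualDiskManager,
        sourceName=src_vmdk, sourceDatacenter=dc_ref,
        destName=dst_vmdk, destDatacenter=dc_ref)
    # wait_for_task polls vCenter (the "_poll_task ... progress is N%" lines)
    # and raises if the task errors out.
    session.wait_for_task(copy_task)
    # ExtendVirtualDisk_Task: grow the copied root disk to the flavor's size.
    extend_task = session.invoke_api(
        vim, 'ExtendVirtualDisk_Task', vim.service_content.virtualDiskManager,
        name=dst_vmdk, datacenter=dc_ref, newCapacityKb=size_in_kb,
        eagerZero=False)
    session.wait_for_task(extend_task)

After the disk work, the same pattern repeats for ReconfigVM_Task (attaching the copied VMDK), Rename_Task and PowerOnVM_Task, which is exactly the sequence of task IDs 401570 through 401574 in this section.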
[ 997.070728] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg adf23687f26244e69d10414eb6a10fe1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.072524] env[62109]: DEBUG nova.network.neutron [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.072917] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] Expecting reply to msg 5f51092ff88f430386dbb4e362ffe9ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.080490] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f51092ff88f430386dbb4e362ffe9ec [ 997.100775] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adf23687f26244e69d10414eb6a10fe1 [ 997.144295] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.196768] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ebbf05-ce98-471e-a1c2-b85ed93e2ca3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.204239] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda0dcb5-591c-4f37-bc77-af7404ed52d7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.232836] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.233327] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg c9032dbbc15b4f6c951b889d574491a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.234528] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e0c376-2c97-409e-8122-a7fdf2f8de26 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.242095] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad5fb5b-79a1-4634-9228-46ce3a64d545 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.246103] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9032dbbc15b4f6c951b889d574491a2 [ 997.255525] env[62109]: DEBUG 
nova.compute.provider_tree [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.256077] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg e6a2de2892214f3e80af728e86100ff9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.264042] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6a2de2892214f3e80af728e86100ff9 [ 997.392881] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401573, 'name': Rename_Task, 'duration_secs': 0.134079} completed successfully. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.393109] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powering on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 997.393591] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9eb06842-fea0-4d16-bcb3-eb77c88e8052 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.399189] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 997.399189] env[62109]: value = "task-401574" [ 997.399189] env[62109]: _type = "Task" [ 997.399189] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.403512] env[62109]: INFO nova.compute.manager [-] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Took 1.02 seconds to deallocate network for instance. [ 997.407619] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401574, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.408097] env[62109]: DEBUG nova.compute.claims [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 997.408272] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.576426] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ac72bf1902bd4ab1a62386811b98780c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.577598] env[62109]: DEBUG oslo_concurrency.lockutils [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] Releasing lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.577823] env[62109]: DEBUG nova.compute.manager [req-39bfa1b8-807a-42cc-9e9f-84959c0f0d64 req-cc941089-28e0-4646-9b33-ec841606a34d service nova] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Received event network-vif-deleted-6ffa751a-5526-4526-875d-e0756fac6200 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 997.608328] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac72bf1902bd4ab1a62386811b98780c [ 997.738775] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Releasing lock "refresh_cache-121dbfda-87d9-4733-a7d2-3ffa6f54df36" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.739474] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 997.739474] env[62109]: DEBUG nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 997.739746] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 997.758572] env[62109]: DEBUG nova.scheduler.client.report [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 997.761230] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c009a73dc22a4e358218dbecde8edb75 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.763308] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.763834] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 9b0808ccfd6045eb847fed0910637275 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.770997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b0808ccfd6045eb847fed0910637275 [ 997.773850] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c009a73dc22a4e358218dbecde8edb75 [ 997.912189] env[62109]: DEBUG oslo_vmware.api [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401574, 'name': PowerOnVM_Task, 'duration_secs': 0.397089} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.912189] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powered on the VM {{(pid=62109) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 997.912189] env[62109]: DEBUG nova.compute.manager [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Checking state {{(pid=62109) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.912627] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c317627-422b-4536-aef0-15949ec8325b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.919659] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg ba0682407409445381d56a39c5f5d37d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 997.947620] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba0682407409445381d56a39c5f5d37d [ 998.098501] env[62109]: INFO nova.scheduler.client.report [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance 138f2594-adbe-4ce2-a395-40fae312981b [ 998.104420] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9977e35a677842dd8de83b4255cf5afe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.117945] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9977e35a677842dd8de83b4255cf5afe [ 998.157590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 0fdea7d97c594bd7bccbbe50ab4f6525 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.170553] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fdea7d97c594bd7bccbbe50ab4f6525 [ 998.265919] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.651s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.266713] env[62109]: ERROR nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. 
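The PortBindingFailed summary above, and the traceback that follows it, come from Nova's Neutron helper refusing a port whose binding never completed. A minimal sketch of that check, assuming the Neutron convention of marking an unbindable port with binding:vif_type = "binding_failed" (illustrative re-implementation, not the code in nova/network/neutron.py):

# Illustrative sketch, not Nova's implementation; it mirrors the
# _ensure_no_port_binding_failure() frame shown in the traceback below.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron logs "
            "for more information.")

def ensure_no_port_binding_failure(port: dict) -> None:
    # Assumption: Neutron flags a failed binding via binding:vif_type.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

# Trimmed port dict of the shape GET /v2.0/ports/<id> returns.
port = {"id": "347cd73f-5ec1-45b9-9ed9-a242e25415ac",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)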
[ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Traceback (most recent call last): [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.driver.spawn(context, instance, image_meta, [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] vm_ref = self.build_virtual_machine(instance, [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] vif_infos = vmwarevif.get_vif_info(self._session, [ 998.266713] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] for vif in network_info: [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return self._sync_wrapper(fn, *args, **kwargs) [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.wait() [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self[:] = self._gt.wait() [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return self._exit_event.wait() [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] current.throw(*self._exc) [ 998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
998.267071] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] result = function(*args, **kwargs) [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] return func(*args, **kwargs) [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise e [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] nwinfo = self.network_api.allocate_for_instance( [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] created_port_ids = self._update_ports_for_instance( [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] with excutils.save_and_reraise_exception(): [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] self.force_reraise() [ 998.267460] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise self.value [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] updated_port = self._update_port( [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] _ensure_no_port_binding_failure(port) [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] raise exception.PortBindingFailed(port_id=port['id']) [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] nova.exception.PortBindingFailed: Binding failed for 
port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. [ 998.267844] env[62109]: ERROR nova.compute.manager [instance: 2e484358-0037-41b0-bf66-534fc7116d34] [ 998.268289] env[62109]: DEBUG nova.compute.utils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 998.269569] env[62109]: DEBUG nova.network.neutron [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.270081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg ad54b0437bd646a28ad6fa32584590b5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.271017] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Build of instance 2e484358-0037-41b0-bf66-534fc7116d34 was re-scheduled: Binding failed for port 347cd73f-5ec1-45b9-9ed9-a242e25415ac, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 998.271479] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 998.271777] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.271987] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.272224] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 998.272640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg e78adcb0f2e54405a2a8cab1ab04ecb8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.273622] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.282s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.274418] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg af8fc8ae777e46528e17cb6ca0da4575 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.279257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e78adcb0f2e54405a2a8cab1ab04ecb8 [ 998.280549] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad54b0437bd646a28ad6fa32584590b5 [ 998.291025] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af8fc8ae777e46528e17cb6ca0da4575 [ 998.427149] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.606723] env[62109]: DEBUG oslo_concurrency.lockutils [None req-9142db86-db4a-470d-bee7-746e13ac7a7c tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "138f2594-adbe-4ce2-a395-40fae312981b" "released" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.603s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.659902] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.660092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.660320] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.660502] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.660667] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.662791] env[62109]: INFO nova.compute.manager [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Terminating instance [ 998.664391] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "refresh_cache-b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.664543] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquired lock "refresh_cache-b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.664704] env[62109]: DEBUG nova.network.neutron [None 
req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 998.665097] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 5ce3a4bbfac241fda1b6d3d52e8e07d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.671390] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ce3a4bbfac241fda1b6d3d52e8e07d4 [ 998.775941] env[62109]: INFO nova.compute.manager [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] [instance: 121dbfda-87d9-4733-a7d2-3ffa6f54df36] Took 1.04 seconds to deallocate network for instance. [ 998.777642] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg a68493da29294880bf2f916dd7d4b4df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.782205] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 3f4a8e8269f2483e8989efc9c999be65 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.792032] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f4a8e8269f2483e8989efc9c999be65 [ 998.796358] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 998.810544] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a68493da29294880bf2f916dd7d4b4df [ 998.871091] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.871598] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c13f70a3eb844dc994e082cb525c2ad4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 998.879332] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c13f70a3eb844dc994e082cb525c2ad4 [ 999.182515] env[62109]: DEBUG nova.network.neutron [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 999.242099] env[62109]: DEBUG nova.network.neutron [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.242687] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 3dfd65a18c9b417d8b7d92bf013f510f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.251384] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dfd65a18c9b417d8b7d92bf013f510f [ 999.283366] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 0d6e5b17b12840299b7c0bf7c42bf019 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.303895] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 999.304503] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 9365266c998347059dd58bcef6343ab3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.314851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9365266c998347059dd58bcef6343ab3 [ 999.323108] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d6e5b17b12840299b7c0bf7c42bf019 [ 999.373214] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-2e484358-0037-41b0-bf66-534fc7116d34" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.373434] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 999.373608] env[62109]: DEBUG nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 999.373765] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 999.393063] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 999.393572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 3782f813e8bb417aa0bd2bd7ff539ee2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.399950] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3782f813e8bb417aa0bd2bd7ff539ee2 [ 999.745025] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Releasing lock "refresh_cache-b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.745605] env[62109]: DEBUG nova.compute.manager [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 999.745821] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 999.746793] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1f2f2f-d9b5-4a0d-8133-17b139e6b439 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.755413] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powering off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 999.755634] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a56c4a63-bc82-4938-92cc-ed895f4afcca {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.761982] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 999.761982] env[62109]: value = "task-401575" [ 999.761982] env[62109]: _type = "Task" [ 999.761982] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.770546] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.806104] env[62109]: INFO nova.scheduler.client.report [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Deleted allocations for instance 121dbfda-87d9-4733-a7d2-3ffa6f54df36 [ 999.812556] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Expecting reply to msg 162b8cd26de54348922e327300b749f0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.817012] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 121dbfda-87d9-4733-a7d2-3ffa6f54df36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 999.817353] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 6a5ae3c1a659469284e11ff75473a1ba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.827056] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 162b8cd26de54348922e327300b749f0 [ 999.834359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a5ae3c1a659469284e11ff75473a1ba [ 999.896444] env[62109]: DEBUG nova.network.neutron [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.896993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 70203e54b51c492d8424e1ebc6ea46bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 999.905198] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70203e54b51c492d8424e1ebc6ea46bb [ 1000.066864] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "f6587428-62b2-4e71-a585-4f794c96d04e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.067092] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "f6587428-62b2-4e71-a585-4f794c96d04e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.067537] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg f23dff1501a648139ca0fcd943a47bca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.076891] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f23dff1501a648139ca0fcd943a47bca [ 1000.271450] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401575, 'name': PowerOffVM_Task, 'duration_secs': 0.131185} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.271905] env[62109]: DEBUG nova.virt.vmwareapi.vm_util [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Powered off the VM {{(pid=62109) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1000.271905] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Unregistering the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1000.272138] env[62109]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3164739-404b-49d6-9151-5a74677322b1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.296213] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Unregistered the VM {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1000.296357] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Deleting contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1000.296506] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Deleting the datastore file [datastore1] b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.296770] env[62109]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53e945be-90d1-40d4-82bb-99e23c917781 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.302519] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for the task: (returnval){ [ 1000.302519] env[62109]: value = "task-401577" [ 1000.302519] env[62109]: _type = "Task" [ 1000.302519] env[62109]: } to complete. {{(pid=62109) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.309933] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401577, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.316538] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e0c8b7e4-7390-4487-864b-bd77985d49b7 tempest-ServersNegativeTestJSON-1356953462 tempest-ServersNegativeTestJSON-1356953462-project-member] Lock "121dbfda-87d9-4733-a7d2-3ffa6f54df36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.867s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.319293] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 2e484358-0037-41b0-bf66-534fc7116d34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1000.319484] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.319601] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Instance 681f4b00-1ed8-47fb-9117-aa3745096e66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62109) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.319728] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1000.320897] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62109) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1000.390525] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89110fbc-e560-4742-a599-b66db296b64a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.397993] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a063bd-6eae-419b-97b3-60ce23a290b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.401309] env[62109]: INFO nova.compute.manager [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: 2e484358-0037-41b0-bf66-534fc7116d34] Took 1.03 seconds to deallocate network for instance. 
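The resource tracker records above keep re-reporting the provider inventory and then the final resource view (48 vCPUs total, 3 allocated). As a worked example of what those inventory fields imply, the usual placement capacity check is effectively (total - reserved) * allocation_ratio; plain arithmetic on the logged dict, not Nova or placement code, with min_unit/max_unit/step_size trimmed for brevity:

# Arithmetic on the inventory logged for provider
# 5d099501-5ecf-4ee9-ac08-22024ac3c80e; not Nova/placement code.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable(inv):
    # Capacity placement can allocate against: (total - reserved) * allocation_ratio.
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(rc, schedulable(inv))
# -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0, which is why the three
#    tracked instances (3 vCPU / 1088 MB / 3 GB in the final resource view)
#    leave ample headroom on this node.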
[ 1000.403106] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 14b1c73bd5b2488481e7c05d187f4fe5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.431649] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dd0de5-b685-44a0-9f11-44b65395971b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.437612] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b1c73bd5b2488481e7c05d187f4fe5 [ 1000.438914] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc0570d-a953-49d8-8383-baf6d270a831 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.453417] env[62109]: DEBUG nova.compute.provider_tree [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.453855] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 3ef43bda0c604c0a96322f804939099c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.460284] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef43bda0c604c0a96322f804939099c [ 1000.569717] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1000.571462] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 7485f7bedb7f466da014accf448c1017 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.602035] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7485f7bedb7f466da014accf448c1017 [ 1000.812314] env[62109]: DEBUG oslo_vmware.api [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Task: {'id': task-401577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097777} completed successfully. 
{{(pid=62109) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.812791] env[62109]: DEBUG nova.virt.vmwareapi.ds_util [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Deleted the datastore file {{(pid=62109) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.813194] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Deleted contents of the VM from datastore datastore1 {{(pid=62109) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1000.813636] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1000.814035] env[62109]: INFO nova.compute.manager [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1000.814394] env[62109]: DEBUG oslo.service.loopingcall [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.814698] env[62109]: DEBUG nova.compute.manager [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1000.814892] env[62109]: DEBUG nova.network.neutron [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1000.828765] env[62109]: DEBUG nova.network.neutron [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1000.829378] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f616056313ff4098b8890860cd093b10 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.836497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f616056313ff4098b8890860cd093b10 [ 1000.908182] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 9625f4cf733846e88641642601a8312d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.956497] env[62109]: DEBUG nova.scheduler.client.report [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1000.959566] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Expecting reply to msg 6153c22280234d5a8d8b0924abd2f89f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1000.965818] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9625f4cf733846e88641642601a8312d [ 1000.969895] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6153c22280234d5a8d8b0924abd2f89f [ 1001.088813] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.333123] env[62109]: DEBUG nova.network.neutron [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.333123] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4fbfec78fb8b43cf8fe3a2e1dbb2129e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1001.341745] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fbfec78fb8b43cf8fe3a2e1dbb2129e [ 1001.433504] env[62109]: INFO nova.scheduler.client.report [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Deleted allocations for instance 2e484358-0037-41b0-bf66-534fc7116d34 [ 1001.440615] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 38f85525dadb40f79ae022882e7b7ead in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1001.476804] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
38f85525dadb40f79ae022882e7b7ead [ 1001.478762] env[62109]: DEBUG nova.compute.resource_tracker [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62109) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1001.480500] env[62109]: DEBUG oslo_concurrency.lockutils [None req-97b8f4a0-d028-4ceb-af5e-6b24b67488f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.207s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.485712] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.709s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.488579] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg ed5d1cdea3374184a49d027b666f87e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1001.534223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed5d1cdea3374184a49d027b666f87e7 [ 1001.835306] env[62109]: INFO nova.compute.manager [-] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Took 1.02 seconds to deallocate network for instance. [ 1001.839292] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 974ad0fbbd5b4e1f931b8d8e800d5dec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1001.871800] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 974ad0fbbd5b4e1f931b8d8e800d5dec [ 1001.942227] env[62109]: DEBUG oslo_concurrency.lockutils [None req-e086231c-0ff8-43f9-92f7-bb0c4d4e59d8 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "2e484358-0037-41b0-bf66-534fc7116d34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.068s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.056602] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b15356-5f26-4da7-9f04-a9b4956e6038 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.064313] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffc9952-ef23-40b5-a4ca-5fe88a9d04b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.094012] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2bcaf7-7376-4a63-ab48-c36a45e63624 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.100912] env[62109]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfae2700-d473-4dbc-b7af-55b56e537eb5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.117771] env[62109]: DEBUG nova.compute.provider_tree [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.118328] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 00eef98686ef46f9b06029dcac16c889 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1002.127263] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00eef98686ef46f9b06029dcac16c889 [ 1002.342563] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.620746] env[62109]: DEBUG nova.scheduler.client.report [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.623076] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 109f4abfb0e24adcb657556d0c474e60 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1002.634336] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 109f4abfb0e24adcb657556d0c474e60 [ 1003.125896] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.640s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.126548] env[62109]: ERROR nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. 
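The traceback that follows walks the build path from nova.compute.manager._build_and_run_instance through the vmwareapi driver's spawn and get_vif_info down to nova/network/neutron.py:294, where _ensure_no_port_binding_failure raises PortBindingFailed. A minimal, self-contained sketch of that check, assuming the failed binding is signalled through the port's binding:vif_type field (the log itself only confirms the function name and the exception it raises):

    # Hypothetical stand-in for nova.exception.PortBindingFailed, kept self-contained.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value reported by Neutron

    def ensure_no_port_binding_failure(port):
        # If Neutron could not bind the port, surface it as a build failure.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Using the port id from this log entry:
    try:
        ensure_no_port_binding_failure({'id': '9a962537-f015-4cde-9316-5978f15b14e0',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

When the exception propagates, _allocate_network_async re-raises it into the waiting greenthread, which is why the traceback below passes through eventlet's event.wait() and current.throw() before reaching the compute manager.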
[ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Traceback (most recent call last): [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.driver.spawn(context, instance, image_meta, [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] vm_ref = self.build_virtual_machine(instance, [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 1003.126548] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] for vif in network_info: [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return self._sync_wrapper(fn, *args, **kwargs) [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.wait() [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self[:] = self._gt.wait() [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return self._exit_event.wait() [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] current.throw(*self._exc) [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1003.126895] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] result = function(*args, **kwargs) [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] return func(*args, **kwargs) [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise e [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] nwinfo = self.network_api.allocate_for_instance( [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] created_port_ids = self._update_ports_for_instance( [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] with excutils.save_and_reraise_exception(): [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] self.force_reraise() [ 1003.127534] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise self.value [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] updated_port = self._update_port( [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] _ensure_no_port_binding_failure(port) [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] raise exception.PortBindingFailed(port_id=port['id']) [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] 
nova.exception.PortBindingFailed: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. [ 1003.127922] env[62109]: ERROR nova.compute.manager [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] [ 1003.127922] env[62109]: DEBUG nova.compute.utils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1003.128528] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.720s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.130373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg e7d7c0035e5849b7b262143131c6aa19 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1003.140401] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Build of instance 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf was re-scheduled: Binding failed for port 9a962537-f015-4cde-9316-5978f15b14e0, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1003.140907] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1003.141162] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.141317] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.141474] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1003.141854] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg fe445ff3db1b412b8ec9b0d2ce37456e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1003.148392] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe445ff3db1b412b8ec9b0d2ce37456e [ 1003.174115] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7d7c0035e5849b7b262143131c6aa19 [ 1003.676117] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1003.712758] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a38287-597b-4145-a338-ba9bcfb58209 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.721873] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380dace1-c035-4f7d-b6f2-50f188b721db {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.751540] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5504462b-2512-4d96-9390-f53aa7d4af91 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.758671] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee306d6b-8417-4c5a-a4a6-d0366deb43d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.771679] env[62109]: DEBUG nova.compute.provider_tree [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.772203] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 122f045fe8ad4703abd1d362e4bb09e2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1003.774148] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.774712] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a09d5f936a1c4375a53c109b0f4ec5ab in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1003.779456] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 122f045fe8ad4703abd1d362e4bb09e2 [ 1003.782779] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a09d5f936a1c4375a53c109b0f4ec5ab [ 1004.274932] env[62109]: DEBUG nova.scheduler.client.report [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.277524] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg d68659c96c1b457cbc3204f42bba6ebc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.279528] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.279644] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1004.279748] env[62109]: DEBUG nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1004.279915] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.288147] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d68659c96c1b457cbc3204f42bba6ebc [ 1004.294308] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1004.294851] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 34cef284683a41d98be27b00993c84aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.301130] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34cef284683a41d98be27b00993c84aa [ 1004.462574] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.462851] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.463377] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg eaa98c91f73d4de693e5e940c1e53134 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.472427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa98c91f73d4de693e5e940c1e53134 [ 1004.780436] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.652s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.781097] env[62109]: ERROR nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. 
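Interleaved with these failures, the "Acquiring lock" / "acquired ... waited" / "released ... held" lines (for example the 1.652s hold on "compute_resources" by abort_instance_claim just above) come from oslo.concurrency's lockutils wrapper, which serializes resource-tracker operations on a named lock and reports how long each caller waited for and then held it. A rough, self-contained imitation of that pattern, with illustrative names and timings rather than anything taken from Nova:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, caller):
        # Imitates the DEBUG lines lockutils emits around a named lock.
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)  # stand-in for claiming CPU, memory and disk for an instance

The traceback that follows repeats the same port-binding code path as the one above, this time for instance 681f4b00-1ed8-47fb-9117-aa3745096e66 and port 6ffa751a-5526-4526-875d-e0756fac6200.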
[ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Traceback (most recent call last): [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.driver.spawn(context, instance, image_meta, [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] vm_ref = self.build_virtual_machine(instance, [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] vif_infos = vmwarevif.get_vif_info(self._session, [ 1004.781097] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] for vif in network_info: [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return self._sync_wrapper(fn, *args, **kwargs) [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.wait() [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self[:] = self._gt.wait() [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return self._exit_event.wait() [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] current.throw(*self._exc) [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1004.781469] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] result = function(*args, **kwargs) [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] return func(*args, **kwargs) [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise e [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] nwinfo = self.network_api.allocate_for_instance( [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] created_port_ids = self._update_ports_for_instance( [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] with excutils.save_and_reraise_exception(): [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] self.force_reraise() [ 1004.781888] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise self.value [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] updated_port = self._update_port( [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] _ensure_no_port_binding_failure(port) [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] raise exception.PortBindingFailed(port_id=port['id']) [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] 
nova.exception.PortBindingFailed: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. [ 1004.782298] env[62109]: ERROR nova.compute.manager [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] [ 1004.782298] env[62109]: DEBUG nova.compute.utils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1004.783574] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Build of instance 681f4b00-1ed8-47fb-9117-aa3745096e66 was re-scheduled: Binding failed for port 6ffa751a-5526-4526-875d-e0756fac6200, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1004.783995] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1004.784236] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquiring lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.784385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Acquired lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.784541] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1004.785005] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 40ba0565e1a3404c8d5cfe99877a201b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.786234] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.359s {{(pid=62109) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.786533] env[62109]: DEBUG nova.objects.instance [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] [instance: b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62109) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1004.787989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg e48933578ed54e6ba7dc9073ab2bf266 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.793041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40ba0565e1a3404c8d5cfe99877a201b [ 1004.796209] env[62109]: DEBUG nova.network.neutron [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.796746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg fe570418d3af4c3ebf37395aaed804df in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1004.803442] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe570418d3af4c3ebf37395aaed804df [ 1004.815668] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e48933578ed54e6ba7dc9073ab2bf266 [ 1004.966054] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1004.968209] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 22d693b652b54e92b6773709c8852cbd in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.000208] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22d693b652b54e92b6773709c8852cbd [ 1005.292210] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 5b19b4e0188446a4952f124b780dcd2a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.298207] env[62109]: INFO nova.compute.manager [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf] Took 1.02 seconds to deallocate network for instance. 
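The inventory block that the report client keeps logging for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e is what Placement uses to decide whether claims like the ones above fit on this node. A quick reading of those numbers, using the usual Placement convention that capacity is (total - reserved) * allocation_ratio and that max_unit caps any single allocation (the convention is assumed here; the figures themselves are copied from the log):

    # Inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e, as logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 124,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Assumed capacity rule: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: about {capacity:.0f} schedulable, at most {inv['max_unit']} per instance")

    # VCPU: about 192 schedulable, at most 16 per instance
    # MEMORY_MB: about 196078 schedulable, at most 65530 per instance
    # DISK_GB: about 400 schedulable, at most 124 per instance

With overcommit only on VCPU (ratio 4.0), the repeated "Inventory has not changed" messages simply mean the periodic update found nothing to rewrite in Placement.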
[ 1005.300373] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1d3b0d3343cb4e3d8bdfdbe1c373f2c1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.302083] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b19b4e0188446a4952f124b780dcd2a [ 1005.303463] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1005.332223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d3b0d3343cb4e3d8bdfdbe1c373f2c1 [ 1005.382340] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.382837] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 79facd44dc0047a8a6e1df4f4e63ab38 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.390359] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79facd44dc0047a8a6e1df4f4e63ab38 [ 1005.488303] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.795565] env[62109]: DEBUG oslo_concurrency.lockutils [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.796031] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-eff708e3-eb06-4078-b77f-334b03126925 tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg e6010fa702b947a78d9298e76f987666 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.796950] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.708s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.798348] env[62109]: INFO nova.compute.claims [None req-c714cd2b-55ed-48ba-813e-5196605e1527 
tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.799864] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 74c4e84866614927a05c4f62d64d4eed in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.804092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 44d3f435771f4cefb86f87b82973942d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.817700] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6010fa702b947a78d9298e76f987666 [ 1005.833168] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44d3f435771f4cefb86f87b82973942d [ 1005.833655] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74c4e84866614927a05c4f62d64d4eed [ 1005.885212] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Releasing lock "refresh_cache-681f4b00-1ed8-47fb-9117-aa3745096e66" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.885434] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1005.885609] env[62109]: DEBUG nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1005.885772] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1005.900426] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1005.900996] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg f18347fb032e4c58adf347787062da9f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1005.907310] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f18347fb032e4c58adf347787062da9f [ 1006.303345] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 28c389b9b99442d6b9661867eb42f816 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1006.312608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28c389b9b99442d6b9661867eb42f816 [ 1006.328495] env[62109]: INFO nova.scheduler.client.report [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance 31c90ad4-71c9-4fea-b548-a8d0e8ff56bf [ 1006.334041] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg a806ab7f88df4ee1b34dfb2de02b974e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1006.343346] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a806ab7f88df4ee1b34dfb2de02b974e [ 1006.403058] env[62109]: DEBUG nova.network.neutron [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.403602] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg ff597b18de2545859638dce0b1d395a7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1006.409972] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff597b18de2545859638dce0b1d395a7 [ 1006.836100] env[62109]: DEBUG oslo_concurrency.lockutils [None req-338b145e-959f-4ce0-811b-0dcff2db841b tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "31c90ad4-71c9-4fea-b548-a8d0e8ff56bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.502s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.861026] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ab337b-43db-4474-984e-46626d8baf36 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.868737] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6149163e-699d-4a9b-93cd-3caeadb89805 {{(pid=62109) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.897373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bd51b4-8bb0-4157-9924-d49c61b91595 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.904055] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4213c4-9917-4bcf-a156-d0b828d4ed67 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.907746] env[62109]: INFO nova.compute.manager [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] [instance: 681f4b00-1ed8-47fb-9117-aa3745096e66] Took 1.02 seconds to deallocate network for instance. [ 1006.909360] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 40826ea3b72f44bea3f047fa2477dc17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1006.918919] env[62109]: DEBUG nova.compute.provider_tree [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.919417] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 3eef49940eb542eca01cf7df486db9bb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1006.925997] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3eef49940eb542eca01cf7df486db9bb [ 1006.943912] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40826ea3b72f44bea3f047fa2477dc17 [ 1007.414303] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg 23cba84229ed4f0793503c2308b11d2e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1007.421551] env[62109]: DEBUG nova.scheduler.client.report [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.423887] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting 
reply to msg b358e3b975d547b2a803190eb37aaa20 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1007.433724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b358e3b975d547b2a803190eb37aaa20 [ 1007.445740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23cba84229ed4f0793503c2308b11d2e [ 1007.926454] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.926941] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1007.928792] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d7efa0e7d4924c0fb9f7d2be794e1fa0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1007.929727] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.587s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.929941] env[62109]: DEBUG nova.objects.instance [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lazy-loading 'resources' on Instance uuid b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 {{(pid=62109) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.930245] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg d3854b3ac8b1428dade29ba89c5bcf4a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1007.934267] env[62109]: INFO nova.scheduler.client.report [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Deleted allocations for instance 681f4b00-1ed8-47fb-9117-aa3745096e66 [ 1007.939985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3854b3ac8b1428dade29ba89c5bcf4a [ 1007.943287] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Expecting reply to msg c632dd0565614ead80e11c7776b0ed71 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1007.953431] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c632dd0565614ead80e11c7776b0ed71 [ 1007.975871] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d7efa0e7d4924c0fb9f7d2be794e1fa0 [ 1007.999998] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff4c784-039b-4e90-b70f-4e1ea4c591ff {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.007481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf76c23b-916a-4755-a8f3-8e87acf7a420 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.035762] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473b1eda-d26e-4d8e-b3da-b2ee460fa460 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.042450] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa24f056-ab3d-49c1-b7e0-773082fa87fa {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.055033] env[62109]: DEBUG nova.compute.provider_tree [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.055530] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 5d1c860f4b3d40b39bf5fc3b85e3bc17 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.063351] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1c860f4b3d40b39bf5fc3b85e3bc17 [ 1008.404395] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.404623] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.405073] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f643f6a550864eec9c1c260547271532 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.413248] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f643f6a550864eec9c1c260547271532 [ 1008.440975] env[62109]: DEBUG nova.compute.utils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1008.441590] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg edb637b25f1b45acbff2e133160bc3d4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.442476] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1008.442647] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1008.444759] env[62109]: DEBUG oslo_concurrency.lockutils [None req-51943235-ad3a-49a7-a9b9-49ac95f4e3e4 tempest-ServerAddressesNegativeTestJSON-1097886150 tempest-ServerAddressesNegativeTestJSON-1097886150-project-member] Lock "681f4b00-1ed8-47fb-9117-aa3745096e66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.409s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.455791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edb637b25f1b45acbff2e133160bc3d4 [ 1008.494371] env[62109]: DEBUG nova.policy [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 1008.558366] env[62109]: DEBUG nova.scheduler.client.report [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.560875] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg af444313d9714271a5542cd0cd3122b4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.574997] env[62109]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af444313d9714271a5542cd0cd3122b4 [ 1008.754334] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Successfully created port: 6801707a-9500-4be7-9efe-705062339599 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.906805] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1008.908615] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 156e746baf9b4db88c714f7b414987b0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.945335] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1008.947155] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 587dca96078f4915999a43c0f20c5eca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1008.958459] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 156e746baf9b4db88c714f7b414987b0 [ 1008.985217] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587dca96078f4915999a43c0f20c5eca [ 1009.063614] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.134s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.069819] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.578s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.069819] env[62109]: INFO nova.compute.claims [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.069819] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 74ff88d5469444bdb0c2b2eb8e8a7d13 in queue 
reply_7522b64acfeb4981b1f36928b040d568 [ 1009.087332] env[62109]: INFO nova.scheduler.client.report [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Deleted allocations for instance b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308 [ 1009.090572] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg 82d6debf57354792a91d2e14920d144f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1009.108589] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74ff88d5469444bdb0c2b2eb8e8a7d13 [ 1009.141949] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82d6debf57354792a91d2e14920d144f [ 1009.430256] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.452309] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 11bf4f8006ba4c28a5f2cdf482973645 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1009.496666] env[62109]: DEBUG nova.compute.manager [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Received event network-changed-6801707a-9500-4be7-9efe-705062339599 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1009.496934] env[62109]: DEBUG nova.compute.manager [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Refreshing instance network info cache due to event network-changed-6801707a-9500-4be7-9efe-705062339599. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1009.497197] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] Acquiring lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.497383] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] Acquired lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.497581] env[62109]: DEBUG nova.network.neutron [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Refreshing network info cache for port 6801707a-9500-4be7-9efe-705062339599 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1009.498054] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] Expecting reply to msg a0212ae9c24a43ec9dd6911cbe2c51b6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1009.503900] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11bf4f8006ba4c28a5f2cdf482973645 [ 1009.506989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0212ae9c24a43ec9dd6911cbe2c51b6 [ 1009.573167] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 3e82f752241a4bac9e7c28d5560978da in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1009.583608] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e82f752241a4bac9e7c28d5560978da [ 1009.594150] env[62109]: DEBUG oslo_concurrency.lockutils [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Lock "b26c8ba7-e41f-4ebd-a7f5-da7ae1b03308" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.934s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.594499] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-55101d59-0bae-41ea-b08b-ae5adcc7d16c tempest-ServerShowV257Test-219043525 tempest-ServerShowV257Test-219043525-project-member] Expecting reply to msg a78889e6a59246bb9b2e53cada6da11a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1009.612073] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a78889e6a59246bb9b2e53cada6da11a [ 1009.678202] env[62109]: ERROR nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. 
[ 1009.678202] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1009.678202] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1009.678202] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1009.678202] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1009.678202] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1009.678202] env[62109]: ERROR nova.compute.manager raise self.value [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1009.678202] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 1009.678202] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1009.678202] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1009.679003] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1009.679003] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1009.679003] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. 
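Note on the traceback above: the frames excutils.py:227 (__exit__) -> excutils.py:200 (force_reraise) -> "raise self.value" come from oslo.utils' save_and_reraise_exception context manager. _update_ports_for_instance catches the failure, runs its cleanup inside the context manager, and the original PortBindingFailed is then re-raised with its original traceback, which is why the exception surfaces unchanged. A minimal, self-contained sketch of that pattern (not Nova's code; update_port() below is a hypothetical stand-in for the Neutron call that fails):

    from oslo_utils import excutils

    def update_port(port_id):
        # hypothetical stand-in for the call that raises PortBindingFailed in Nova
        raise RuntimeError("binding failed for port %s" % port_id)

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(update_port(port_id))
            except Exception:
                # the body runs as cleanup; on exit the saved exception is
                # re-raised with its original traceback
                with excutils.save_and_reraise_exception():
                    print("rolling back ports created so far: %s" % created)

    try:
        update_ports_for_instance(["6801707a-9500-4be7-9efe-705062339599"])
    except RuntimeError as exc:
        print("re-raised: %s" % exc)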
[ 1009.679003] env[62109]: ERROR nova.compute.manager [ 1009.679003] env[62109]: Traceback (most recent call last): [ 1009.679003] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1009.679003] env[62109]: listener.cb(fileno) [ 1009.679003] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1009.679003] env[62109]: result = function(*args, **kwargs) [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1009.679003] env[62109]: return func(*args, **kwargs) [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1009.679003] env[62109]: raise e [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1009.679003] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1009.679003] env[62109]: created_port_ids = self._update_ports_for_instance( [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1009.679003] env[62109]: with excutils.save_and_reraise_exception(): [ 1009.679003] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1009.679003] env[62109]: self.force_reraise() [ 1009.679003] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1009.679003] env[62109]: raise self.value [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1009.679003] env[62109]: updated_port = self._update_port( [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1009.679003] env[62109]: _ensure_no_port_binding_failure(port) [ 1009.679003] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1009.679003] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 1009.680334] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. [ 1009.680334] env[62109]: Removing descriptor: 19 [ 1009.955814] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1009.996617] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1009.996617] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1009.996617] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.996789] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1010.001058] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.001058] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1010.001058] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1010.001058] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1010.001058] env[62109]: DEBUG nova.virt.hardware [None 
req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1010.001426] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1010.001426] env[62109]: DEBUG nova.virt.hardware [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1010.001426] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b97f6ee-45b8-463e-b536-9f55a50e9881 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.010711] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d371cc-623b-40c7-8637-7d6c465c5561 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.029749] env[62109]: ERROR nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. 
[ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Traceback (most recent call last): [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] yield resources [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.driver.spawn(context, instance, image_meta, [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] vm_ref = self.build_virtual_machine(instance, [ 1010.029749] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] vif_infos = vmwarevif.get_vif_info(self._session, [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] for vif in network_info: [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return self._sync_wrapper(fn, *args, **kwargs) [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.wait() [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self[:] = self._gt.wait() [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return self._exit_event.wait() [ 1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1010.030156] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] current.throw(*self._exc) [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] result = function(*args, **kwargs) [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return func(*args, **kwargs) [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise e [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] nwinfo = self.network_api.allocate_for_instance( [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] created_port_ids = self._update_ports_for_instance( [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] with excutils.save_and_reraise_exception(): [ 1010.030645] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.force_reraise() [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise self.value [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] updated_port = self._update_port( [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] _ensure_no_port_binding_failure(port) [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise exception.PortBindingFailed(port_id=port['id']) [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. [ 1010.031067] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] [ 1010.031067] env[62109]: INFO nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Terminating instance [ 1010.037493] env[62109]: DEBUG nova.network.neutron [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1010.040747] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.130470] env[62109]: DEBUG nova.network.neutron [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.130985] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] Expecting reply to msg 8133ac6bad6b41528c642bc0deff8c68 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1010.135902] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506da9a0-d84b-46f9-8b7a-d1f146ef27a5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.141114] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8133ac6bad6b41528c642bc0deff8c68 [ 1010.144373] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0e402b-5452-4e65-b7f9-4bd00eb16e34 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.188376] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0cb80c-35c0-43d0-b5b6-08eecd57d6d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.198715] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f129b7-2c43-4e1e-a3ac-f8c4225d7596 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.218562] env[62109]: DEBUG nova.compute.provider_tree [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 
tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.219110] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 86c134e96f874f03b83cf41b21d05fdb in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1010.225952] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86c134e96f874f03b83cf41b21d05fdb [ 1010.633910] env[62109]: DEBUG oslo_concurrency.lockutils [req-8ff0b54a-19ba-4a2b-9440-169c8217ea00 req-faee9aa7-7f19-4dc8-b78f-cc78a124e32c service nova] Releasing lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.634340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.634535] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1010.634992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5919917ecd3e47ac92fbb11f64aa1354 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1010.642623] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5919917ecd3e47ac92fbb11f64aa1354 [ 1010.722080] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.724372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 3bb51b3eead845a0ac27ad149aadc316 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1010.736080] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bb51b3eead845a0ac27ad149aadc316 [ 1011.153126] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 
tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1011.226653] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.227156] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1011.229309] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg b251ce89e3c740eebea26735339adb24 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.230959] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.231551] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d465e1032d394f24a0923b95d0a9259c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.232177] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.802s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.233635] env[62109]: INFO nova.compute.claims [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.235109] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg c9605c1c799a4138876433ea81a24d01 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.249732] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d465e1032d394f24a0923b95d0a9259c [ 1011.288384] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9605c1c799a4138876433ea81a24d01 [ 1011.291652] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b251ce89e3c740eebea26735339adb24 [ 1011.525586] 
env[62109]: DEBUG nova.compute.manager [req-3939e950-1772-4c01-bfa0-3ffe99a28aea req-cfd5594b-24e2-4ab7-9ffa-9076dc4e5da1 service nova] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Received event network-vif-deleted-6801707a-9500-4be7-9efe-705062339599 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1011.737947] env[62109]: DEBUG nova.compute.utils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1011.738664] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 7468f2a3bcfd4c73aad3e3a280dcea1f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.740022] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1011.740168] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1011.743716] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.744084] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1011.744281] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1011.745640] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4d640e6c611143019df904fe9005a21e in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.746358] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88e072f1-5d5b-4d58-867b-11e380ae5f14 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.748967] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7468f2a3bcfd4c73aad3e3a280dcea1f [ 1011.752996] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d640e6c611143019df904fe9005a21e [ 1011.757461] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3832652-f6b0-4e36-95c6-0d891531d0b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.777656] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f6587428-62b2-4e71-a585-4f794c96d04e could not be found. [ 1011.777854] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1011.778055] env[62109]: INFO nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1011.778278] env[62109]: DEBUG oslo.service.loopingcall [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.778487] env[62109]: DEBUG nova.compute.manager [-] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.778577] env[62109]: DEBUG nova.network.neutron [-] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1011.781086] env[62109]: DEBUG nova.policy [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2883d8b8ab764050a13c8b3a56318c34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98934316e57a4ea69b2bb5a2f2aaf251', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 1011.795303] env[62109]: DEBUG nova.network.neutron [-] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1011.795738] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6b62b04a95634ff29f92cc3d9bfa1a78 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1011.801827] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b62b04a95634ff29f92cc3d9bfa1a78 [ 1012.055298] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Successfully created port: 7ef816eb-00da-4479-b932-14b1601845cd {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.243852] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Start building block device mappings for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1012.245870] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg ae6b00dbcdb24f159893ce9feb1e7eaf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1012.287644] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae6b00dbcdb24f159893ce9feb1e7eaf [ 1012.297506] env[62109]: DEBUG nova.network.neutron [-] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.297929] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6ef64c8c273f4107856817842ea605d2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1012.306342] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc79c2e-0526-4df2-82d2-9cf073aab93f {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.309167] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ef64c8c273f4107856817842ea605d2 [ 1012.314594] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b05c44-fd40-4afe-908d-37d27aa8b9e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.344288] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b27986-1ddf-4ba6-968b-add7267dac9a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.351225] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb83f62b-f8ee-436c-a272-72a7794a77bf {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.364761] env[62109]: DEBUG nova.compute.provider_tree [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.365254] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f0e5cbde62da4166a7faa519e6c154a5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1012.373699] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0e5cbde62da4166a7faa519e6c154a5 [ 1012.750707] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 8015f40c292c4ae3a2324775afc6d703 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1012.782362] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8015f40c292c4ae3a2324775afc6d703 [ 1012.801393] env[62109]: INFO nova.compute.manager [-] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Took 
1.02 seconds to deallocate network for instance. [ 1012.803588] env[62109]: DEBUG nova.compute.claims [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1012.803751] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.865875] env[62109]: ERROR nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. [ 1012.865875] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1012.865875] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1012.865875] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1012.865875] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1012.865875] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1012.865875] env[62109]: ERROR nova.compute.manager raise self.value [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1012.865875] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 1012.865875] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1012.865875] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1012.866372] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1012.866372] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1012.866372] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. 
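Note on the lock entries: the DEBUG lines of the form Acquiring lock "compute_resources" by "...instance_claim" / acquired ... waited N s / "released" ... held N s seen throughout this section (instance_claim, update_usage, and the abort_instance_claim just above) are emitted by oslo.concurrency's lockutils wrapper ("inner" in lockutils.py), which the resource tracker uses to serialise claims. A minimal sketch of that usage, assuming only the public oslo.concurrency API (the function body is a placeholder, not Nova's resource tracker):

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim(instance_uuid):
        # runs with the in-process "compute_resources" lock held; lockutils
        # logs how long the caller waited for the lock and how long it held it
        return instance_uuid

    instance_claim("f6587428-62b2-4e71-a585-4f794c96d04e")

    # the same named lock can also be taken explicitly as a context manager
    with lockutils.lock("compute_resources"):
        pass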
[ 1012.866372] env[62109]: ERROR nova.compute.manager [ 1012.866372] env[62109]: Traceback (most recent call last): [ 1012.866372] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1012.866372] env[62109]: listener.cb(fileno) [ 1012.866372] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1012.866372] env[62109]: result = function(*args, **kwargs) [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1012.866372] env[62109]: return func(*args, **kwargs) [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1012.866372] env[62109]: raise e [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1012.866372] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1012.866372] env[62109]: created_port_ids = self._update_ports_for_instance( [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1012.866372] env[62109]: with excutils.save_and_reraise_exception(): [ 1012.866372] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1012.866372] env[62109]: self.force_reraise() [ 1012.866372] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1012.866372] env[62109]: raise self.value [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1012.866372] env[62109]: updated_port = self._update_port( [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1012.866372] env[62109]: _ensure_no_port_binding_failure(port) [ 1012.866372] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1012.866372] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 1012.867296] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. 
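Both build failures in this section are the same symptom on different ports (6801707a-9500-4be7-9efe-705062339599 and 7ef816eb-00da-4479-b932-14b1601845cd): Neutron created the port but could not bind it on the target host, so Nova raises PortBindingFailed and tears the build down. Where the log says "please check neutron logs", the usual first check is the port's binding attributes; a hedged sketch using openstacksdk (the client and the cloud name "devstack" are assumptions, not anything shown in this log):

    import openstack

    # assumes a clouds.yaml entry named "devstack" with credentials for this cloud
    conn = openstack.connect(cloud="devstack")
    port = conn.network.get_port("7ef816eb-00da-4479-b932-14b1601845cd")
    # a port that failed to bind typically reports vif_type "binding_failed"
    print(port.binding_vif_type, port.binding_host_id, port.binding_vnic_type)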
[ 1012.867296] env[62109]: Removing descriptor: 19 [ 1012.868225] env[62109]: DEBUG nova.scheduler.client.report [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1012.870599] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg f4963c287c0d4fb788f5a48ccc6dd19a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1012.881944] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4963c287c0d4fb788f5a48ccc6dd19a [ 1013.254411] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1013.280427] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1013.280730] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1013.280903] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.281123] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Flavor pref 0:0:0 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1013.281304] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.281489] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1013.281730] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1013.281922] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1013.282125] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1013.282324] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1013.282532] env[62109]: DEBUG nova.virt.hardware [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1013.283475] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838d69da-7e78-426f-91d3-6ffeacb27eb6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.292136] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb83561-9ad6-486d-a719-4cea8770c7d6 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.306649] env[62109]: ERROR nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. 
[ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Traceback (most recent call last): [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] yield resources [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.driver.spawn(context, instance, image_meta, [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] vm_ref = self.build_virtual_machine(instance, [ 1013.306649] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] for vif in network_info: [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return self._sync_wrapper(fn, *args, **kwargs) [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.wait() [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self[:] = self._gt.wait() [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return self._exit_event.wait() [ 1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1013.307121] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] current.throw(*self._exc) [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] result = function(*args, **kwargs) [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return func(*args, **kwargs) [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise e [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] nwinfo = self.network_api.allocate_for_instance( [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] created_port_ids = self._update_ports_for_instance( [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] with excutils.save_and_reraise_exception(): [ 1013.307564] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.force_reraise() [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise self.value [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] updated_port = self._update_port( [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] _ensure_no_port_binding_failure(port) [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise exception.PortBindingFailed(port_id=port['id']) [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. [ 1013.308053] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] [ 1013.308053] env[62109]: INFO nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Terminating instance [ 1013.309263] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.309417] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.309575] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1013.309977] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c02a2bc8b0214ffa91c674c896293621 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1013.318657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c02a2bc8b0214ffa91c674c896293621 [ 1013.373229] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.373761] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Start building networks asynchronously for instance. 
{{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1013.375396] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 33c3bea4a34449148063a7bfb53c1f9f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1013.376557] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.573s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.378143] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg b9148ec15fef49e6b8f7c7b97bb5dbba in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1013.407105] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33c3bea4a34449148063a7bfb53c1f9f [ 1013.408191] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9148ec15fef49e6b8f7c7b97bb5dbba [ 1013.550030] env[62109]: DEBUG nova.compute.manager [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Received event network-changed-7ef816eb-00da-4479-b932-14b1601845cd {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1013.550193] env[62109]: DEBUG nova.compute.manager [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Refreshing instance network info cache due to event network-changed-7ef816eb-00da-4479-b932-14b1601845cd. {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1013.550405] env[62109]: DEBUG oslo_concurrency.lockutils [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] Acquiring lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.825903] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1013.881013] env[62109]: DEBUG nova.compute.utils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1013.881942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5904c30d6a5a4751b7c16863df79f707 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1013.886352] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Allocating IP information in the background. {{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1013.886724] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1013.893768] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5904c30d6a5a4751b7c16863df79f707 [ 1013.904842] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.905309] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg d59b93609daa4595becf795219d904e6 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1013.917647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d59b93609daa4595becf795219d904e6 [ 1013.924185] env[62109]: DEBUG nova.policy [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba5969be1a254281b4dffd81aa84ae7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa59611b27bb41beb282e68ccfa6fadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 1013.936915] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d2cfd7-aa8e-4817-a90c-fa071ed121e2 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.944054] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3b188b-5a48-477c-abf1-5996f9b60638 {{(pid=62109) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.975481] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d959e05-ba53-4d19-b6be-1395fc0bede1 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.982234] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da67a2cd-19b1-421f-a1ed-4843a2711a79 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.995022] env[62109]: DEBUG nova.compute.provider_tree [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.995512] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 45918d7512ac4e71b35a486dcfac0d4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.004341] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45918d7512ac4e71b35a486dcfac0d4b [ 1014.159081] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Successfully created port: 8b395229-2200-402f-a201-d6593182bba4 {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1014.387539] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1014.390030] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 9cc165d9c83d45c0ad07a68af7d1e88c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.410073] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.410520] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Start destroying the instance on the hypervisor. 
{{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1014.410787] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1014.411131] env[62109]: DEBUG oslo_concurrency.lockutils [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] Acquired lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.411360] env[62109]: DEBUG nova.network.neutron [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Refreshing network info cache for port 7ef816eb-00da-4479-b932-14b1601845cd {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1014.411899] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] Expecting reply to msg 835d1b94dbea43d5a65c2ecaac66e8c4 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.412833] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3cc4bba-14b4-44fb-8a46-cf9cc5875854 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.419938] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 835d1b94dbea43d5a65c2ecaac66e8c4 [ 1014.423690] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6fd1f1-2b03-4573-aad3-8eeb891202d9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.437175] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cc165d9c83d45c0ad07a68af7d1e88c [ 1014.448315] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e34cf886-0f9b-4b9c-91c9-d04d9edb08e2 could not be found. [ 1014.448570] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1014.448714] env[62109]: INFO nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Took 0.04 seconds to destroy the instance on the hypervisor. 
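Editor's note: the teardown sequence logged here tolerates an instance that never materialized on the hypervisor: the vmwareapi driver raises InstanceNotFound, the manager downgrades it to a WARNING, records the instance as destroyed, and proceeds to deallocate its network. A minimal sketch of that tolerant-teardown pattern follows; all names are hypothetical stand-ins, not Nova's API.

    # Minimal sketch of the tolerant teardown seen above (illustrative only).
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        pass


    def destroy_on_backend(instance_uuid):
        # Stand-in for the hypervisor call; here the VM was never created,
        # which matches the WARNING in the log.
        raise InstanceNotFound(instance_uuid)


    def teardown(instance_uuid, deallocate_network):
        try:
            destroy_on_backend(instance_uuid)
        except InstanceNotFound:
            # A missing backend VM is not fatal during cleanup: log it and
            # continue, as the log does ("Instance destroyed").
            LOG.warning("Instance does not exist on backend: %s",
                        instance_uuid)
        # Network resources are released whether or not the VM existed.
        deallocate_network(instance_uuid)


    teardown('e34cf886-0f9b-4b9c-91c9-d04d9edb08e2',
             lambda uuid: LOG.debug("Deallocating network for %s", uuid))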
[ 1014.448950] env[62109]: DEBUG oslo.service.loopingcall [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.449232] env[62109]: DEBUG nova.compute.manager [-] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1014.449378] env[62109]: DEBUG nova.network.neutron [-] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1014.461242] env[62109]: DEBUG nova.network.neutron [-] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1014.461680] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c7471333bf24eefac3c07614fc7684b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.467651] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c7471333bf24eefac3c07614fc7684b [ 1014.497816] env[62109]: DEBUG nova.scheduler.client.report [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.500140] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d19f7d4ed696453e817d60a329b406b8 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.510235] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d19f7d4ed696453e817d60a329b406b8 [ 1014.848779] env[62109]: DEBUG nova.compute.manager [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Received event network-changed-8b395229-2200-402f-a201-d6593182bba4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1014.848977] env[62109]: DEBUG nova.compute.manager [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Refreshing instance network info cache due to event network-changed-8b395229-2200-402f-a201-d6593182bba4. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1014.849283] env[62109]: DEBUG oslo_concurrency.lockutils [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] Acquiring lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.849439] env[62109]: DEBUG oslo_concurrency.lockutils [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] Acquired lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.849597] env[62109]: DEBUG nova.network.neutron [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Refreshing network info cache for port 8b395229-2200-402f-a201-d6593182bba4 {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1014.850016] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] Expecting reply to msg ae33c6d6a3204de0830c50fca9cde590 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.856467] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae33c6d6a3204de0830c50fca9cde590 [ 1014.894463] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0766cbe1be85425da0845fc8a501dae3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.921809] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0766cbe1be85425da0845fc8a501dae3 [ 1014.931775] env[62109]: DEBUG nova.network.neutron [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1014.963150] env[62109]: DEBUG nova.network.neutron [-] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.963588] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 103d24958c704a47a14f9a9d4454614d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1014.972546] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103d24958c704a47a14f9a9d4454614d [ 1014.987657] env[62109]: ERROR nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. 
[ 1014.987657] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1014.987657] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1014.987657] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1014.987657] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1014.987657] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1014.987657] env[62109]: ERROR nova.compute.manager raise self.value [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1014.987657] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 1014.987657] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1014.987657] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1014.988287] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1014.988287] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1014.988287] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. 
[ 1014.988287] env[62109]: ERROR nova.compute.manager [ 1014.988287] env[62109]: Traceback (most recent call last): [ 1014.988287] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1014.988287] env[62109]: listener.cb(fileno) [ 1014.988287] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1014.988287] env[62109]: result = function(*args, **kwargs) [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1014.988287] env[62109]: return func(*args, **kwargs) [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1014.988287] env[62109]: raise e [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1014.988287] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1014.988287] env[62109]: created_port_ids = self._update_ports_for_instance( [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1014.988287] env[62109]: with excutils.save_and_reraise_exception(): [ 1014.988287] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1014.988287] env[62109]: self.force_reraise() [ 1014.988287] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1014.988287] env[62109]: raise self.value [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1014.988287] env[62109]: updated_port = self._update_port( [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1014.988287] env[62109]: _ensure_no_port_binding_failure(port) [ 1014.988287] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1014.988287] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 1014.989225] env[62109]: nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. [ 1014.989225] env[62109]: Removing descriptor: 16 [ 1015.002128] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.626s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.002942] env[62109]: ERROR nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. 
[ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Traceback (most recent call last): [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.driver.spawn(context, instance, image_meta, [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] vm_ref = self.build_virtual_machine(instance, [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] vif_infos = vmwarevif.get_vif_info(self._session, [ 1015.002942] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] for vif in network_info: [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return self._sync_wrapper(fn, *args, **kwargs) [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.wait() [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self[:] = self._gt.wait() [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return self._exit_event.wait() [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] current.throw(*self._exc) [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1015.003368] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] result = function(*args, **kwargs) [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] return func(*args, **kwargs) [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise e [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] nwinfo = self.network_api.allocate_for_instance( [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] created_port_ids = self._update_ports_for_instance( [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] with excutils.save_and_reraise_exception(): [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] self.force_reraise() [ 1015.003741] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise self.value [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] updated_port = self._update_port( [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] _ensure_no_port_binding_failure(port) [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] raise exception.PortBindingFailed(port_id=port['id']) [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] 
nova.exception.PortBindingFailed: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. [ 1015.004111] env[62109]: ERROR nova.compute.manager [instance: f6587428-62b2-4e71-a585-4f794c96d04e] [ 1015.004365] env[62109]: DEBUG nova.compute.utils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1015.006232] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Build of instance f6587428-62b2-4e71-a585-4f794c96d04e was re-scheduled: Binding failed for port 6801707a-9500-4be7-9efe-705062339599, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1015.006661] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1015.006892] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.007036] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.007195] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1015.007645] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a4a0d58dbc6149c89571c94b59f5132b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.009011] env[62109]: DEBUG nova.network.neutron [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.009458] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] Expecting reply to msg 
e64d44455de6435e86e58a4e416e8454 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.013609] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4a0d58dbc6149c89571c94b59f5132b [ 1015.016604] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e64d44455de6435e86e58a4e416e8454 [ 1015.367594] env[62109]: DEBUG nova.network.neutron [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1015.397354] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Start spawning the instance on the hypervisor. {{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1015.423654] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1015.423918] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1015.424089] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.424275] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1015.424420] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.424565] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1015.424769] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1015.424924] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1015.425108] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1015.425240] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1015.425413] env[62109]: DEBUG nova.virt.hardware [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1015.426284] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41e4a5c-065a-43b8-9c35-5f7221ee9b75 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.434783] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd4f44a-b4b7-473e-9561-381af90b8f4d {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.448386] env[62109]: ERROR nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. 
[ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Traceback (most recent call last): [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] yield resources [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.driver.spawn(context, instance, image_meta, [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] vm_ref = self.build_virtual_machine(instance, [ 1015.448386] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] vif_infos = vmwarevif.get_vif_info(self._session, [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] for vif in network_info: [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return self._sync_wrapper(fn, *args, **kwargs) [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.wait() [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self[:] = self._gt.wait() [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return self._exit_event.wait() [ 1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1015.448826] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] current.throw(*self._exc) [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] result = function(*args, **kwargs) [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return func(*args, **kwargs) [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise e [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] nwinfo = self.network_api.allocate_for_instance( [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] created_port_ids = self._update_ports_for_instance( [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] with excutils.save_and_reraise_exception(): [ 1015.449227] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.force_reraise() [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise self.value [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] updated_port = self._update_port( [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] _ensure_no_port_binding_failure(port) [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise exception.PortBindingFailed(port_id=port['id']) [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. [ 1015.449625] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] [ 1015.449625] env[62109]: INFO nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Terminating instance [ 1015.450693] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.460896] env[62109]: DEBUG nova.network.neutron [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.461371] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] Expecting reply to msg 8b6cdbb39bbf4fad98eae77716ba5a67 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.469169] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b6cdbb39bbf4fad98eae77716ba5a67 [ 1015.470752] env[62109]: INFO nova.compute.manager [-] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Took 1.02 seconds to deallocate network for instance. 
[ 1015.472857] env[62109]: DEBUG nova.compute.claims [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1015.473034] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.473247] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.475142] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 1ee3d0f4b155408bbe472024147dbc6c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.513273] env[62109]: DEBUG oslo_concurrency.lockutils [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] Releasing lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.513521] env[62109]: DEBUG nova.compute.manager [req-350acb99-a0e2-4420-bb97-4e9c2fd3c533 req-63ef2b88-759d-4037-8afa-3de2d9502b78 service nova] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Received event network-vif-deleted-7ef816eb-00da-4479-b932-14b1601845cd {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1015.514196] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ee3d0f4b155408bbe472024147dbc6c [ 1015.527893] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1015.613390] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.613931] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 98c140a859dc45e7a18fe075fd2eae96 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.621791] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98c140a859dc45e7a18fe075fd2eae96 [ 1015.964155] env[62109]: DEBUG oslo_concurrency.lockutils [req-9db0eee8-fd25-4589-bf8b-5e8702469fe1 req-c21629e0-2785-4fbc-b5be-8bfb218bfd32 service nova] Releasing lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.964532] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.964722] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1015.965156] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 5dea69b3139142c1b0367f819f69794f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1015.972180] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dea69b3139142c1b0367f819f69794f [ 1016.033692] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63347322-dbe7-4efb-b895-75599cfb3ace {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.041220] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a6bdcf-a470-4894-8463-539975d0cc24 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.070214] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6420a7-492a-42f0-ae9f-b110be45abf9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.076861] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5fce36-d46f-4976-9c00-baf431f0a2ed {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.090167] env[62109]: DEBUG 
nova.compute.provider_tree [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.090647] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 2c50f929bc3f4dceabfa6bc0b63338a1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1016.097282] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c50f929bc3f4dceabfa6bc0b63338a1 [ 1016.115781] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-f6587428-62b2-4e71-a585-4f794c96d04e" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.116124] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1016.116448] env[62109]: DEBUG nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1016.116730] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1016.130966] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1016.131584] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg e98aafabf15e4ce6932bdd486de74dbf in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1016.139580] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e98aafabf15e4ce6932bdd486de74dbf [ 1016.492296] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1016.561604] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.562315] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 3d4d8a38a41749a0a0e3c1a3b8e4f927 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1016.570133] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d4d8a38a41749a0a0e3c1a3b8e4f927 [ 1016.593666] env[62109]: DEBUG nova.scheduler.client.report [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.596120] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg c01cdb34cce746068606aa927ab94dd3 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1016.607838] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c01cdb34cce746068606aa927ab94dd3 [ 1016.633538] env[62109]: DEBUG nova.network.neutron [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.634088] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 66fcefa8cb1b49f98bd958105dc1e3de in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1016.641081] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66fcefa8cb1b49f98bd958105dc1e3de [ 1016.876578] env[62109]: DEBUG nova.compute.manager [req-bb3a17e0-a179-4080-aeb0-ee629298bd52 req-2e41f6bd-7e71-4601-8d3d-154712b807e1 service nova] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Received event network-vif-deleted-8b395229-2200-402f-a201-d6593182bba4 {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1017.064260] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock 
"refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.064958] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1017.065266] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1017.065729] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46340142-9279-416d-ae4a-54cfa37cfeb4 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.074564] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f913de4-0eea-494c-aa6c-9eaca7710378 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.095000] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600 could not be found. [ 1017.095334] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1017.095619] env[62109]: INFO nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1017.095989] env[62109]: DEBUG oslo.service.loopingcall [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.096409] env[62109]: DEBUG nova.compute.manager [-] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1017.096622] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1017.098651] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.625s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.099304] env[62109]: ERROR nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Traceback (most recent call last): [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.driver.spawn(context, instance, image_meta, [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] vm_ref = self.build_virtual_machine(instance, [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] vif_infos = vmwarevif.get_vif_info(self._session, [ 1017.099304] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] for vif in network_info: [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return self._sync_wrapper(fn, *args, **kwargs) [ 1017.099719] 
env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.wait() [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self[:] = self._gt.wait() [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return self._exit_event.wait() [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] current.throw(*self._exc) [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1017.099719] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] result = function(*args, **kwargs) [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] return func(*args, **kwargs) [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise e [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] nwinfo = self.network_api.allocate_for_instance( [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] created_port_ids = self._update_ports_for_instance( [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] with excutils.save_and_reraise_exception(): [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1017.100155] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] self.force_reraise() [ 1017.100155] 
env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise self.value [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] updated_port = self._update_port( [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] _ensure_no_port_binding_failure(port) [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] raise exception.PortBindingFailed(port_id=port['id']) [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] nova.exception.PortBindingFailed: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. [ 1017.100586] env[62109]: ERROR nova.compute.manager [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] [ 1017.101525] env[62109]: DEBUG nova.compute.utils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1017.103202] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Build of instance e34cf886-0f9b-4b9c-91c9-d04d9edb08e2 was re-scheduled: Binding failed for port 7ef816eb-00da-4479-b932-14b1601845cd, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1017.103751] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1017.104086] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquiring lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.104340] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Acquired lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.104595] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1017.105103] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 901efaae93b74791870fad483a3d913d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.111517] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 901efaae93b74791870fad483a3d913d [ 1017.122702] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1017.123278] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e5b590f3d697415789b25c07443f83fc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.129449] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5b590f3d697415789b25c07443f83fc [ 1017.135895] env[62109]: INFO nova.compute.manager [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: f6587428-62b2-4e71-a585-4f794c96d04e] Took 1.02 seconds to deallocate network for instance. 
[ 1017.137625] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d9732c2a8df9461b9c22d07d91658bdc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.167866] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9732c2a8df9461b9c22d07d91658bdc [ 1017.620934] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1017.625220] env[62109]: DEBUG nova.network.neutron [-] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.625724] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 258ef4a0cff54f9ba486b3970edaa634 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.633521] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 258ef4a0cff54f9ba486b3970edaa634 [ 1017.641746] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9f2d203da30e4a4aaad4b573355f923a in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.669657] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f2d203da30e4a4aaad4b573355f923a [ 1017.688581] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.689199] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg bb0186aa5e804773b34bfa8330e2a088 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1017.696092] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb0186aa5e804773b34bfa8330e2a088 [ 1018.127892] env[62109]: INFO nova.compute.manager [-] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Took 1.03 seconds to deallocate network for instance. 
[ 1018.130641] env[62109]: DEBUG nova.compute.claims [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1018.130981] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.131330] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.133215] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 07284395a61a480483c5bcbf29971f31 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1018.164225] env[62109]: INFO nova.scheduler.client.report [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance f6587428-62b2-4e71-a585-4f794c96d04e [ 1018.169853] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07284395a61a480483c5bcbf29971f31 [ 1018.170525] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg a542d31995444486bf7929f84c3f0d4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1018.181747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a542d31995444486bf7929f84c3f0d4b [ 1018.191434] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Releasing lock "refresh_cache-e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.191641] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1018.191816] env[62109]: DEBUG nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1018.191976] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1018.209866] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1018.210386] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg f12663aa9ab14fb9afc4a7ec32c97d7f in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1018.218216] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f12663aa9ab14fb9afc4a7ec32c97d7f [ 1018.671009] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22f335d-d11c-429c-b20e-15c51aa6124e {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.673780] env[62109]: DEBUG oslo_concurrency.lockutils [None req-c714cd2b-55ed-48ba-813e-5196605e1527 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "f6587428-62b2-4e71-a585-4f794c96d04e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.607s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.678693] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dbdbab-0aea-4088-ba29-a7d050685054 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.707437] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a868e81c-34a2-4dd9-b0d5-b14a6a7adb22 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.713157] env[62109]: DEBUG nova.network.neutron [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.713641] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 
tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 6cf32c621a124a61b45b39c0fbe780c5 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1018.715353] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7975c272-3409-4924-b32c-2e2613144a86 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.728635] env[62109]: DEBUG nova.compute.provider_tree [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.729193] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 78328e4b3844411ca50c0d999e3996e7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1018.730756] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cf32c621a124a61b45b39c0fbe780c5 [ 1018.735573] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78328e4b3844411ca50c0d999e3996e7 [ 1019.219916] env[62109]: INFO nova.compute.manager [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] [instance: e34cf886-0f9b-4b9c-91c9-d04d9edb08e2] Took 1.03 seconds to deallocate network for instance. [ 1019.221825] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg f21b74f9ce64483c989a958bedad0a14 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1019.232199] env[62109]: DEBUG nova.scheduler.client.report [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.234473] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 1355cde1bfd4484181e916dd581b5629 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1019.248483] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1355cde1bfd4484181e916dd581b5629 [ 1019.262391] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f21b74f9ce64483c989a958bedad0a14 [ 1019.726942] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg 
d6736dc6e5834eeb930a3fb125fe95c0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1019.737181] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.606s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.737824] env[62109]: ERROR nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Traceback (most recent call last): [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.driver.spawn(context, instance, image_meta, [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] vm_ref = self.build_virtual_machine(instance, [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] vif_infos = vmwarevif.get_vif_info(self._session, [ 1019.737824] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] for vif in network_info: [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return self._sync_wrapper(fn, *args, **kwargs) [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.wait() [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1019.738286] env[62109]: ERROR nova.compute.manager 
[instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self[:] = self._gt.wait() [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return self._exit_event.wait() [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] current.throw(*self._exc) [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1019.738286] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] result = function(*args, **kwargs) [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] return func(*args, **kwargs) [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise e [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] nwinfo = self.network_api.allocate_for_instance( [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] created_port_ids = self._update_ports_for_instance( [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] with excutils.save_and_reraise_exception(): [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] self.force_reraise() [ 1019.738670] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise self.value [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] updated_port = self._update_port( [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] _ensure_no_port_binding_failure(port) [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] raise exception.PortBindingFailed(port_id=port['id']) [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] nova.exception.PortBindingFailed: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. [ 1019.739068] env[62109]: ERROR nova.compute.manager [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] [ 1019.739068] env[62109]: DEBUG nova.compute.utils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1019.740116] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Build of instance 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600 was re-scheduled: Binding failed for port 8b395229-2200-402f-a201-d6593182bba4, please check neutron logs for more information. 
{{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1019.740503] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1019.740728] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquiring lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.740870] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Acquired lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.741031] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1019.741415] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 2ad309969a4d46f396dce029b18356d0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1019.755755] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ad309969a4d46f396dce029b18356d0 [ 1019.756373] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "25797537-a81b-47de-8d65-afe0d154f317" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.756586] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "25797537-a81b-47de-8d65-afe0d154f317" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.757370] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 9149c54f2fbf402491c386a63ab3f56d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1019.768254] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6736dc6e5834eeb930a3fb125fe95c0 [ 1019.769807] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9149c54f2fbf402491c386a63ab3f56d [ 1020.252201] env[62109]: INFO nova.scheduler.client.report [None 
req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Deleted allocations for instance e34cf886-0f9b-4b9c-91c9-d04d9edb08e2 [ 1020.258586] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Starting instance... {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1020.260172] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 4fc1553bf5f94f82ab5cfb18438171ec in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1020.261372] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Expecting reply to msg f5d0f11c0e2b4547b2817d2e8f07d425 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1020.262567] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1020.291958] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5d0f11c0e2b4547b2817d2e8f07d425 [ 1020.296186] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fc1553bf5f94f82ab5cfb18438171ec [ 1020.352794] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.353301] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 4659e354f50549b9802a5f47f8414a1b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1020.360687] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4659e354f50549b9802a5f47f8414a1b [ 1020.765385] env[62109]: DEBUG oslo_concurrency.lockutils [None req-5d3b4d71-fa17-4478-afb6-149a36645019 tempest-AttachVolumeNegativeTest-726024558 tempest-AttachVolumeNegativeTest-726024558-project-member] Lock "e34cf886-0f9b-4b9c-91c9-d04d9edb08e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.302s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.781425] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.781425] 
env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.782514] env[62109]: INFO nova.compute.claims [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.784072] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 07432ce5f026442d8aaaca5739325b4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1020.815879] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07432ce5f026442d8aaaca5739325b4b [ 1020.855339] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Releasing lock "refresh_cache-8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.855722] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1020.855722] env[62109]: DEBUG nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1020.855875] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1020.870466] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1020.870984] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg c14a543aaff14462bc1fbc9e7d35e65c in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1020.876993] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c14a543aaff14462bc1fbc9e7d35e65c [ 1021.288062] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg c49144a400ff488d84ee964b47f922c1 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1021.295713] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c49144a400ff488d84ee964b47f922c1 [ 1021.372723] env[62109]: DEBUG nova.network.neutron [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.373141] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg e1d7e3f68b2848759acd6325334cd151 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1021.381401] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1d7e3f68b2848759acd6325334cd151 [ 1021.841286] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b892478-615f-46c7-bd00-8747b347b6af {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.849197] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f38c46-889a-4629-beec-e356d6fe2ead {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.882902] env[62109]: INFO nova.compute.manager [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] [instance: 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600] Took 1.03 seconds to deallocate network for instance. 
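The traceback above bottoms out in nova/network/neutron.py line 294, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']); the build for instance 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600 is then re-scheduled and its network deallocated, as the entries around this point show. A minimal sketch of that guard, assuming the failed binding is signalled by the port's 'binding:vif_type' attribute (an assumption; the log shows only the call site and the raised exception):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (hypothetical local copy)."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding via binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the traceback above, assumed here to carry a failed binding.
    port = {'id': '8b395229-2200-402f-a201-d6593182bba4',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message text as the ERROR entries above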
[ 1021.884413] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg fb3517c4db9c43e1bc397893db084e26 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1021.886567] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74693eff-fcb9-4bf8-b0d5-66e802b13b3c {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.898259] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b485bca-d1f8-4a5d-8828-7c1f26abc3d3 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.914690] env[62109]: DEBUG nova.compute.provider_tree [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.915228] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 94c0bcb219cc40248f493c93e19ccfd9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1021.921836] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94c0bcb219cc40248f493c93e19ccfd9 [ 1021.927540] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb3517c4db9c43e1bc397893db084e26 [ 1022.390497] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 2970ca17fa674fd986ed0302cedf97b7 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1022.418799] env[62109]: DEBUG nova.scheduler.client.report [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1022.422956] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg da58a321b9b547b6bcb4b001694af031 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1022.433408] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da58a321b9b547b6bcb4b001694af031 [ 1022.438992] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2970ca17fa674fd986ed0302cedf97b7 [ 1022.919259] env[62109]: INFO nova.scheduler.client.report [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 
tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Deleted allocations for instance 8bfbf340-da8b-4ce7-8fb3-6804f7bbc600 [ 1022.925397] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Expecting reply to msg 0318fb692c1d49559fc9f2a4c79e932b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1022.927071] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.927639] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Start building networks asynchronously for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1022.929257] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg c7fc5e646d12495aad59f3193077140d in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1022.938192] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0318fb692c1d49559fc9f2a4c79e932b [ 1022.983410] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7fc5e646d12495aad59f3193077140d [ 1023.427238] env[62109]: DEBUG oslo_concurrency.lockutils [None req-1beace66-4f8d-4644-bd15-0ed816ec9aa5 tempest-ServersTestJSON-851829591 tempest-ServersTestJSON-851829591-project-member] Lock "8bfbf340-da8b-4ce7-8fb3-6804f7bbc600" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.022s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.431586] env[62109]: DEBUG nova.compute.utils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Using /dev/sd instead of None {{(pid=62109) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1023.432223] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 5980f52bf541490896e9a52cf3af2c4b in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1023.433156] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Allocating IP information in the background. 
{{(pid=62109) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1023.433325] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] allocate_for_instance() {{(pid=62109) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1023.447627] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5980f52bf541490896e9a52cf3af2c4b [ 1023.477193] env[62109]: DEBUG nova.policy [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d003a5943d141cc9165e43148dec381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a669cc62964b05bdaa71442c08a566', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62109) authorize /opt/stack/nova/nova/policy.py:203}} [ 1023.727038] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Successfully created port: e35385bf-a80e-4160-96db-c2a15a6554ef {{(pid=62109) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.937576] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Start building block device mappings for instance. {{(pid=62109) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1023.939363] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 6395340427234673b4d9b8f4846969aa in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1023.970221] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6395340427234673b4d9b8f4846969aa [ 1024.355655] env[62109]: DEBUG nova.compute.manager [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Received event network-changed-e35385bf-a80e-4160-96db-c2a15a6554ef {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1024.355848] env[62109]: DEBUG nova.compute.manager [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Refreshing instance network info cache due to event network-changed-e35385bf-a80e-4160-96db-c2a15a6554ef. 
{{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1024.356091] env[62109]: DEBUG oslo_concurrency.lockutils [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] Acquiring lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.356235] env[62109]: DEBUG oslo_concurrency.lockutils [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] Acquired lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.356392] env[62109]: DEBUG nova.network.neutron [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Refreshing network info cache for port e35385bf-a80e-4160-96db-c2a15a6554ef {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1024.356870] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] Expecting reply to msg 2edebbd0dd54495ba7022b3f6c82ae58 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1024.364097] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2edebbd0dd54495ba7022b3f6c82ae58 [ 1024.445286] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 808a6814b95648e89d3a6b1207b4a5ca in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1024.509276] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 808a6814b95648e89d3a6b1207b4a5ca [ 1024.611502] env[62109]: ERROR nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. 
[ 1024.611502] env[62109]: ERROR nova.compute.manager Traceback (most recent call last): [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1024.611502] env[62109]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1024.611502] env[62109]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1024.611502] env[62109]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1024.611502] env[62109]: ERROR nova.compute.manager self.force_reraise() [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1024.611502] env[62109]: ERROR nova.compute.manager raise self.value [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1024.611502] env[62109]: ERROR nova.compute.manager updated_port = self._update_port( [ 1024.611502] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1024.611502] env[62109]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1024.612344] env[62109]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1024.612344] env[62109]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1024.612344] env[62109]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. 
[ 1024.612344] env[62109]: ERROR nova.compute.manager [ 1024.612779] env[62109]: Traceback (most recent call last): [ 1024.612958] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1024.612958] env[62109]: listener.cb(fileno) [ 1024.613115] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1024.613115] env[62109]: result = function(*args, **kwargs) [ 1024.613225] env[62109]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1024.613225] env[62109]: return func(*args, **kwargs) [ 1024.613324] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1024.613324] env[62109]: raise e [ 1024.613425] env[62109]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1024.613425] env[62109]: nwinfo = self.network_api.allocate_for_instance( [ 1024.613524] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1024.613524] env[62109]: created_port_ids = self._update_ports_for_instance( [ 1024.613631] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1024.613631] env[62109]: with excutils.save_and_reraise_exception(): [ 1024.613733] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1024.613733] env[62109]: self.force_reraise() [ 1024.613832] env[62109]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1024.613832] env[62109]: raise self.value [ 1024.613930] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1024.613930] env[62109]: updated_port = self._update_port( [ 1024.614028] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1024.614028] env[62109]: _ensure_no_port_binding_failure(port) [ 1024.614140] env[62109]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1024.614140] env[62109]: raise exception.PortBindingFailed(port_id=port['id']) [ 1024.614251] env[62109]: nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. [ 1024.614318] env[62109]: Removing descriptor: 19 [ 1024.879120] env[62109]: DEBUG nova.network.neutron [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1024.949451] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Start spawning the instance on the hypervisor. 
{{(pid=62109) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1024.975035] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-21T07:11:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-21T07:11:33Z,direct_url=,disk_format='vmdk',id=4800b6ec-9841-4c82-b42e-97cce3beeec5,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6686ebd4d8b94da0877b56330ae2885f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-21T07:11:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1024.975534] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1024.975849] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image limits 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.976225] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Flavor pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1024.976528] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Image pref 0:0:0 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.976819] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62109) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1024.977323] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1024.977652] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1024.977964] env[62109]: DEBUG nova.virt.hardware [None 
req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Got 1 possible topologies {{(pid=62109) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1024.978306] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1024.978626] env[62109]: DEBUG nova.virt.hardware [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62109) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.980094] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85c6bc4-8189-4433-861d-63753518d6b9 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.988791] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e0a59b-5739-4aa9-ba8b-72ba79e2a9fe {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.995377] env[62109]: DEBUG nova.network.neutron [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.995989] env[62109]: INFO oslo_messaging._drivers.amqpdriver [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] Expecting reply to msg 1cdc09f16e3e4feea456bc7627c5e5fe in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1025.009237] env[62109]: ERROR nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. 
[ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] Traceback (most recent call last): [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] yield resources [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] self.driver.spawn(context, instance, image_meta, [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] vm_ref = self.build_virtual_machine(instance, [ 1025.009237] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] vif_infos = vmwarevif.get_vif_info(self._session, [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] for vif in network_info: [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] return self._sync_wrapper(fn, *args, **kwargs) [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] self.wait() [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] self[:] = self._gt.wait() [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] return self._exit_event.wait() [ 1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1025.009688] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] current.throw(*self._exc) [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] result = function(*args, **kwargs) [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] return func(*args, **kwargs) [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] raise e [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] nwinfo = self.network_api.allocate_for_instance( [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] created_port_ids = self._update_ports_for_instance( [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] with excutils.save_and_reraise_exception(): [ 1025.010122] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] self.force_reraise() [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] raise self.value [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] updated_port = self._update_port( [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] _ensure_no_port_binding_failure(port) [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] raise exception.PortBindingFailed(port_id=port['id']) [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. [ 1025.010583] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] [ 1025.011013] env[62109]: INFO nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Terminating instance [ 1025.012770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cdc09f16e3e4feea456bc7627c5e5fe [ 1025.013354] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.498488] env[62109]: DEBUG oslo_concurrency.lockutils [req-97fb207b-1170-4471-b145-6168695cf9be req-068711e5-e802-4f51-82a8-d3f17ba4a965 service nova] Releasing lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.499364] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.499364] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1025.500030] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg c94a81ca7cb0472b87e0bca3da0796dc in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1025.509297] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c94a81ca7cb0472b87e0bca3da0796dc [ 1026.017227] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance cache missing network info. 
{{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.088121] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.088648] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 297fdabd2b2a47e8954f27629912e603 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1026.096427] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 297fdabd2b2a47e8954f27629912e603 [ 1026.381267] env[62109]: DEBUG nova.compute.manager [req-7d5468bb-1a67-4e80-8b8b-8953a7710965 req-67bcf901-1729-4dd2-a3cc-213d46805a9d service nova] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Received event network-vif-deleted-e35385bf-a80e-4160-96db-c2a15a6554ef {{(pid=62109) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1026.591266] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.591694] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Start destroying the instance on the hypervisor. {{(pid=62109) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1026.591890] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Destroying instance {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.592222] env[62109]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9aae130d-bc13-4c66-b381-c1e279a1944a {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.601182] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1a9641-76de-48bb-b1ca-a556286f96e7 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.621729] env[62109]: WARNING nova.virt.vmwareapi.vmops [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25797537-a81b-47de-8d65-afe0d154f317 could not be found. 
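These entries interleave several concurrent builds (ServersTestJSON, DeleteServersTestJSON, AttachVolumeNegativeTest); what ties one attempt together is the req-... request ID, the tempest user/project names, and the [instance: <uuid>] tag in each record. A small parser for pulling those fields out of a single entry, with the regular expression fitted to the format seen here (an assumption, not an official log spec):

    import re

    # Fitted to entries of the form:
    #   [ <secs>] env[<pid>]: <LEVEL> <logger> [<request context>] [instance: <uuid>] <message>
    # where the context block and the instance tag may be absent.
    LINE_RE = re.compile(
        r'\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+'
        r'(?P<level>DEBUG|INFO|WARNING|ERROR)\s+'
        r'(?P<logger>\S+)\s+'
        r'(?:\[(?P<context>(?!instance:)[^\]]*)\]\s+)?'
        r'(?:\[instance:\s*(?P<instance>[0-9a-f-]+)\]\s+)?'
        r'(?P<message>.*)')

    def parse_entry(entry):
        """Return the fields of one log entry as a dict, or None if it does not match."""
        m = LINE_RE.match(entry.strip())
        return m.groupdict() if m else None

    sample = ('[ 1026.621729] env[62109]: WARNING nova.virt.vmwareapi.vmops '
              '[None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 '
              'tempest-DeleteServersTestJSON-377525300 '
              'tempest-DeleteServersTestJSON-377525300-project-member] '
              '[instance: 25797537-a81b-47de-8d65-afe0d154f317] '
              'Instance does not exist on backend')
    print(parse_entry(sample))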
[ 1026.621878] env[62109]: DEBUG nova.virt.vmwareapi.vmops [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance destroyed {{(pid=62109) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.622054] env[62109]: INFO nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1026.622286] env[62109]: DEBUG oslo.service.loopingcall [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62109) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.622503] env[62109]: DEBUG nova.compute.manager [-] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1026.622597] env[62109]: DEBUG nova.network.neutron [-] [instance: 25797537-a81b-47de-8d65-afe0d154f317] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.636300] env[62109]: DEBUG nova.network.neutron [-] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.636770] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e82c0b875ff54a14b14c9f0e56b6ac94 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1026.643940] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e82c0b875ff54a14b14c9f0e56b6ac94 [ 1027.139231] env[62109]: DEBUG nova.network.neutron [-] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.139747] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e1df45323d15489895b21cefd23dd9d9 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1027.147383] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1df45323d15489895b21cefd23dd9d9 [ 1027.642335] env[62109]: INFO nova.compute.manager [-] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Took 1.02 seconds to deallocate network for instance. 
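The claim abort that follows re-reports the inventory for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e already shown at [1022.418799]: per resource class it carries total, reserved, min_unit/max_unit, step_size and allocation_ratio. A quick worked check of the effective capacity those numbers imply, assuming the usual Placement formula (total - reserved) * allocation_ratio (the formula is an assumption here; the log only shows the raw inventory, and min_unit/max_unit/step_size are omitted from the sketch):

    # Inventory as reported in the scheduler.client.report entries above and below.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Assumed formula: schedulable capacity = (total - reserved) * allocation_ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}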
[ 1027.644737] env[62109]: DEBUG nova.compute.claims [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Aborting claim: {{(pid=62109) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1027.644912] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.645125] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.647154] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 2dea5d17e1da4deb86715eb59851bdf0 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1027.680778] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dea5d17e1da4deb86715eb59851bdf0 [ 1028.181869] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cc698f-6019-4b79-906b-b5ff16cf70cc {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.190057] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55799a6a-f7e2-42ae-a5ee-9464442d0c5b {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.224135] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e840e511-3451-4bc0-b08b-1251435c1465 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.232212] env[62109]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ca22e7-8a04-4305-b4df-e4de5db55ef5 {{(pid=62109) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.245231] env[62109]: DEBUG nova.compute.provider_tree [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed in ProviderTree for provider: 5d099501-5ecf-4ee9-ac08-22024ac3c80e {{(pid=62109) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.245740] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 26bc1331e003460fb47ac332f4e1d8a2 in queue reply_7522b64acfeb4981b1f36928b040d568 [ 1028.254628] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26bc1331e003460fb47ac332f4e1d8a2 [ 1028.748466] 
[ 1028.748466] env[62109]: DEBUG nova.scheduler.client.report [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Inventory has not changed for provider 5d099501-5ecf-4ee9-ac08-22024ac3c80e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 124, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62109) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1028.750882] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 701598ddca844ac0b38c895548ed9242 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1028.763438] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 701598ddca844ac0b38c895548ed9242
[ 1029.254151] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.609s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
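The inventory dict above is what the compute node reports to placement; the capacity the scheduler can place against is (total - reserved) * allocation_ratio per resource class. A short worked sketch using the reported numbers:

```python
# Usable capacity derived from the inventory reported above:
#   usable = (total - reserved) * allocation_ratio
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)
# Prints: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```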
[ 1029.254862] env[62109]: ERROR nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information.
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] Traceback (most recent call last):
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     self.driver.spawn(context, instance, image_meta,
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     vm_ref = self.build_virtual_machine(instance,
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 1029.254862] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     for vif in network_info:
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     return self._sync_wrapper(fn, *args, **kwargs)
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     self.wait()
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     self[:] = self._gt.wait()
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     return self._exit_event.wait()
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     current.throw(*self._exc)
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 1029.255391] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     result = function(*args, **kwargs)
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     return func(*args, **kwargs)
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     raise e
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     nwinfo = self.network_api.allocate_for_instance(
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     created_port_ids = self._update_ports_for_instance(
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     with excutils.save_and_reraise_exception():
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     self.force_reraise()
[ 1029.255940] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     raise self.value
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     updated_port = self._update_port(
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     _ensure_no_port_binding_failure(port)
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]     raise exception.PortBindingFailed(port_id=port['id'])
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317] nova.exception.PortBindingFailed: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information.
[ 1029.256544] env[62109]: ERROR nova.compute.manager [instance: 25797537-a81b-47de-8d65-afe0d154f317]
[ 1029.256544] env[62109]: DEBUG nova.compute.utils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. {{(pid=62109) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1029.257221] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Build of instance 25797537-a81b-47de-8d65-afe0d154f317 was re-scheduled: Binding failed for port e35385bf-a80e-4160-96db-c2a15a6554ef, please check neutron logs for more information. {{(pid=62109) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 1029.257775] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Unplugging VIFs for instance {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 1029.258005] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquiring lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1029.258216] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Acquired lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1029.258443] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Building network info cache for instance {{(pid=62109) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1029.258852] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 74fba509f36849e789d1844b951e22c2 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1029.266013] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74fba509f36849e789d1844b951e22c2
[ 1029.774697] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
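The last frames of the traceback show where the failure originates: Neutron reports a failed binding on the port, and Nova converts that into PortBindingFailed in _ensure_no_port_binding_failure. A self-contained approximation of that check follows; the class and function here are simplified stand-ins rather than Nova's actual definitions, and the 'binding_failed' sentinel is assumed from the conventional Neutron binding:vif_type value:

```python
# Simplified stand-ins for nova.exception.PortBindingFailed and the check in
# nova/network/neutron.py shown in the traceback above; not the real code.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Assumption: a failed binding is signalled by binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure({
        'id': 'e35385bf-a80e-4160-96db-c2a15a6554ef',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)
```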
[ 1029.853201] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1029.853748] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg cf0069bc63ff45b18e5a244c9ecd0e03 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1029.862288] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf0069bc63ff45b18e5a244c9ecd0e03
[ 1030.356428] env[62109]: DEBUG oslo_concurrency.lockutils [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Releasing lock "refresh_cache-25797537-a81b-47de-8d65-afe0d154f317" {{(pid=62109) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1030.356824] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62109) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 1030.356868] env[62109]: DEBUG nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Deallocating network for instance {{(pid=62109) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1030.357024] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] deallocate_for_instance() {{(pid=62109) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1030.374339] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Instance cache missing network info. {{(pid=62109) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1030.374910] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 53922c6978484445be73266808dd26c3 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1030.381593] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53922c6978484445be73266808dd26c3
[ 1030.877675] env[62109]: DEBUG nova.network.neutron [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Updating instance_info_cache with network_info: [] {{(pid=62109) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1030.878262] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg 10e7c42f5ec14133ac9a5ff19261825b in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1030.886210] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10e7c42f5ec14133ac9a5ff19261825b
[ 1031.381028] env[62109]: INFO nova.compute.manager [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] [instance: 25797537-a81b-47de-8d65-afe0d154f317] Took 1.02 seconds to deallocate network for instance.
[ 1031.383558] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg ce8ca6681fec4c0da88f7382be1ebb06 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1031.416369] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce8ca6681fec4c0da88f7382be1ebb06
[ 1031.889036] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg d958e42d0bae49bf85d51f0ebc27b936 in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1031.918209] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d958e42d0bae49bf85d51f0ebc27b936
[ 1032.410379] env[62109]: INFO nova.scheduler.client.report [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Deleted allocations for instance 25797537-a81b-47de-8d65-afe0d154f317
[ 1032.416221] env[62109]: INFO oslo_messaging._drivers.amqpdriver [None req-222bfe20-21ee-4646-b8b7-c9761d55a2f6 tempest-DeleteServersTestJSON-377525300 tempest-DeleteServersTestJSON-377525300-project-member] Expecting reply to msg e12bdd06720549b6b4b7a94ba50a478f in queue reply_7522b64acfeb4981b1f36928b040d568
[ 1032.429536] env[62109]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e12bdd06720549b6b4b7a94ba50a478f
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.162s {{(pid=62109) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}